From 7b66c32b7086764b85ec039d25c5090f96df2eda Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 28 Aug 2023 09:55:50 -0700 Subject: [PATCH 01/41] feat(ingest): support writing configs to files (#8696) --- .../datahub/configuration/config_loader.py | 44 +++++++++++++-- .../utilities/urns/data_platform_urn.py | 3 + .../tests/unit/config/basic.yml | 4 ++ .../tests/unit/config/test_config_loader.py | 55 ++++++++++++++++++- .../unit/config/write_to_file_directive.yml | 11 ++++ 5 files changed, 110 insertions(+), 7 deletions(-) create mode 100644 metadata-ingestion/tests/unit/config/write_to_file_directive.yml diff --git a/metadata-ingestion/src/datahub/configuration/config_loader.py b/metadata-ingestion/src/datahub/configuration/config_loader.py index 78bee21d1bda4..30ca4ff6aed2d 100644 --- a/metadata-ingestion/src/datahub/configuration/config_loader.py +++ b/metadata-ingestion/src/datahub/configuration/config_loader.py @@ -2,6 +2,7 @@ import pathlib import re import sys +import tempfile import unittest.mock from typing import Any, Dict, Set, Union from urllib import parse @@ -14,7 +15,7 @@ from datahub.configuration.yaml import YamlConfigurationMechanism -def resolve_element(element: str) -> str: +def _resolve_element(element: str) -> str: if re.search(r"(\$\{).+(\})", element): return expandvars(element, nounset=True) elif element.startswith("$"): @@ -30,7 +31,7 @@ def _resolve_list(ele_list: list) -> list: new_v: list = [] for ele in ele_list: if isinstance(ele, str): - new_v.append(resolve_element(ele)) + new_v.append(_resolve_element(ele)) elif isinstance(ele, list): new_v.append(_resolve_list(ele)) elif isinstance(ele, dict): @@ -48,7 +49,7 @@ def resolve_env_variables(config: dict) -> dict: elif isinstance(v, list): new_dict[k] = _resolve_list(v) elif isinstance(v, str): - new_dict[k] = resolve_element(v) + new_dict[k] = _resolve_element(v) else: new_dict[k] = v return new_dict @@ -67,12 +68,40 @@ def list_referenced_env_variables(config: dict) -> Set[str]: return set([call[1][0] for call in calls]) +WRITE_TO_FILE_DIRECTIVE_PREFIX = "__DATAHUB_TO_FILE_" + + +def _process_directives(config: dict) -> dict: + def _process(obj: Any) -> Any: + if isinstance(obj, dict): + new_obj = {} + for k, v in obj.items(): + if isinstance(k, str) and k.startswith(WRITE_TO_FILE_DIRECTIVE_PREFIX): + # This writes the value to a temporary file and replaces the value with the path to the file. 
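+                    # Descriptive note (comments added for clarity): env vars are
+                    # resolved before directives run, so for the test fixture entry
+                    #   __DATAHUB_TO_FILE_ssl_key: ${DATAHUB_SSL_KEY}
+                    # the resolved secret is written to a temp file and the config
+                    # ends up with ssl_key: <temp file path>. delete=False keeps the
+                    # file around for the consuming source to read after loading.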
+ config_option = k[len(WRITE_TO_FILE_DIRECTIVE_PREFIX) :] + + with tempfile.NamedTemporaryFile("w", delete=False) as f: + filepath = f.name + f.write(v) + + new_obj[config_option] = filepath + else: + new_obj[k] = _process(v) + + return new_obj + else: + return obj + + return _process(config) + + def load_config_file( config_file: Union[str, pathlib.Path], squirrel_original_config: bool = False, squirrel_field: str = "__orig_config", allow_stdin: bool = False, resolve_env_vars: bool = True, + process_directives: bool = True, ) -> dict: config_mech: ConfigurationMechanism if allow_stdin and config_file == "-": @@ -105,10 +134,13 @@ def load_config_file( config_fp = io.StringIO(raw_config_file) raw_config = config_mech.load_config(config_fp) + + config = raw_config.copy() if resolve_env_vars: - config = resolve_env_variables(raw_config) - else: - config = raw_config + config = resolve_env_variables(config) + if process_directives: + config = _process_directives(config) + if squirrel_original_config: config[squirrel_field] = raw_config return config diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py index 8732592b80020..79cf54dfe920a 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py @@ -29,3 +29,6 @@ def _validate_entity_type(entity_type: str) -> None: raise InvalidUrnError( f"Entity type should be {DataPlatformUrn.ENTITY_TYPE} but found {entity_type}" ) + + def get_platform_name(self) -> str: + return self.get_entity_id()[0] diff --git a/metadata-ingestion/tests/unit/config/basic.yml b/metadata-ingestion/tests/unit/config/basic.yml index cc5372a05d84a..ce9f3b3f8cf94 100644 --- a/metadata-ingestion/tests/unit/config/basic.yml +++ b/metadata-ingestion/tests/unit/config/basic.yml @@ -5,3 +5,7 @@ nested: array: - one - two + numbers: + 4: "four" + 6: "six" + "8": "eight" diff --git a/metadata-ingestion/tests/unit/config/test_config_loader.py b/metadata-ingestion/tests/unit/config/test_config_loader.py index e29aa3b0b582c..3253c96b876aa 100644 --- a/metadata-ingestion/tests/unit/config/test_config_loader.py +++ b/metadata-ingestion/tests/unit/config/test_config_loader.py @@ -1,6 +1,9 @@ import os +import pathlib +import textwrap from unittest import mock +import deepdiff import expandvars import pytest import yaml @@ -18,7 +21,14 @@ ( # Basic YAML load "tests/unit/config/basic.yml", - {"foo": "bar", "nested": {"array": ["one", "two"], "hi": "hello"}}, + { + "foo": "bar", + "nested": { + "array": ["one", "two"], + "hi": "hello", + "numbers": {4: "four", 6: "six", "8": "eight"}, + }, + }, {}, set(), ), @@ -165,3 +175,46 @@ def test_load_error(pytestconfig, filename, env, error_type): with mock.patch.dict(os.environ, env): with pytest.raises(error_type): _ = load_config_file(filepath) + + +def test_write_file_directive(pytestconfig): + filepath = pytestconfig.rootpath / "tests/unit/config/write_to_file_directive.yml" + + fake_ssl_key = "my-secret-key-value" + + with mock.patch.dict(os.environ, {"DATAHUB_SSL_KEY": fake_ssl_key}): + loaded_config = load_config_file(filepath, squirrel_original_config=False) + + # Check that the rest of the dict is unmodified. 
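+    # ssl_cert and ssl_key are excluded from this comparison because the
+    # write-to-file directive rewrote their values into temp file paths;
+    # the contents of those files are verified separately below.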
+    diff = deepdiff.DeepDiff(
+        loaded_config,
+        {
+            "foo": "bar",
+            "nested": {
+                "hi": "hello",
+                "another-key": "final-value",
+            },
+        },
+        exclude_paths=[
+            "root['nested']['ssl_cert']",
+            "root['nested']['ssl_key']",
+        ],
+    )
+    assert not diff
+
+    # Check that the ssl_cert was written to a file.
+    ssl_cert_path = loaded_config["nested"]["ssl_cert"]
+    assert (
+        pathlib.Path(ssl_cert_path).read_text()
+        == textwrap.dedent(
+            """
+            -----BEGIN CERTIFICATE-----
+            thisisnotarealcert
+            -----END CERTIFICATE-----
+            """
+        ).lstrip()
+    )
+
+    # Check that the ssl_key was written to a file.
+    ssl_key_path = loaded_config["nested"]["ssl_key"]
+    assert pathlib.Path(ssl_key_path).read_text() == fake_ssl_key
diff --git a/metadata-ingestion/tests/unit/config/write_to_file_directive.yml b/metadata-ingestion/tests/unit/config/write_to_file_directive.yml
new file mode 100644
index 0000000000000..e47f192096309
--- /dev/null
+++ b/metadata-ingestion/tests/unit/config/write_to_file_directive.yml
@@ -0,0 +1,11 @@
+foo: bar
+nested:
+  hi: hello
+  __DATAHUB_TO_FILE_ssl_cert: |
+    -----BEGIN CERTIFICATE-----
+    thisisnotarealcert
+    -----END CERTIFICATE-----
+
+  __DATAHUB_TO_FILE_ssl_key: ${DATAHUB_SSL_KEY}
+
+  another-key: final-value
From 2f11f24d83013a2498cf5e1d635eb174e1ec336c Mon Sep 17 00:00:00 2001
From: Indy Prentice
Date: Mon, 28 Aug 2023 14:02:00 -0300
Subject: [PATCH 02/41] feat(search): De-duplicate scale factors across entities (#8718)

Co-authored-by: Indy Prentice
Co-authored-by: Indy Prentice
---
 .../query/request/SearchQueryBuilder.java     | 31 ++++++++++++-------
 1 file changed, 20 insertions(+), 11 deletions(-)

diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java
index 49fc882314e0a..a00882cfde240 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java
@@ -12,6 +12,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.models.EntitySpec;
+import com.linkedin.metadata.models.SearchScoreFieldSpec;
 import com.linkedin.metadata.models.SearchableFieldSpec;
 import com.linkedin.metadata.models.annotation.SearchScoreAnnotation;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation;
@@ -322,23 +323,31 @@ private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotation
     finalScoreFunctions.add(
         new FunctionScoreQueryBuilder.FilterFunctionBuilder(ScoreFunctionBuilders.weightFactorFunction(1.0f)));

-    entitySpecs.stream()
+    Map<String, SearchableAnnotation> annotations = entitySpecs.stream()
         .map(EntitySpec::getSearchableFieldSpecs)
         .flatMap(List::stream)
         .map(SearchableFieldSpec::getSearchableAnnotation)
-        .flatMap(annotation -> annotation
-            .getWeightsPerFieldValue()
-            .entrySet()
-            .stream()
-            .map(entry -> buildWeightFactorFunction(annotation.getFieldName(), entry.getKey(),
-                entry.getValue())))
-        .forEach(finalScoreFunctions::add);
+        .collect(Collectors.toMap(SearchableAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> annotation1));
+
+    for (Map.Entry<String, SearchableAnnotation> annotationEntry : annotations.entrySet()) {
+      SearchableAnnotation annotation = annotationEntry.getValue();
+      annotation
+          .getWeightsPerFieldValue()
+          .entrySet()
+          .stream()
+          .map(entry -> buildWeightFactorFunction(annotation.getFieldName(), entry.getKey(),
+              entry.getValue())).forEach(finalScoreFunctions::add);
+    }

-    entitySpecs.stream()
+    Map<String, SearchScoreAnnotation> searchScoreAnnotationMap = entitySpecs.stream()
         .map(EntitySpec::getSearchScoreFieldSpecs)
         .flatMap(List::stream)
-        .map(fieldSpec -> buildScoreFunctionFromSearchScoreAnnotation(fieldSpec.getSearchScoreAnnotation()))
-        .forEach(finalScoreFunctions::add);
+        .map(SearchScoreFieldSpec::getSearchScoreAnnotation)
+        .collect(Collectors.toMap(SearchScoreAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> annotation1));
+    for (Map.Entry<String, SearchScoreAnnotation> searchScoreAnnotationEntry : searchScoreAnnotationMap.entrySet()) {
+      SearchScoreAnnotation annotation = searchScoreAnnotationEntry.getValue();
+      finalScoreFunctions.add(buildScoreFunctionFromSearchScoreAnnotation(annotation));
+    }

     return finalScoreFunctions.toArray(new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]);
   }
From 97019d8d8ff4a4fc7bc9deba66608e7b292b5672 Mon Sep 17 00:00:00 2001
From: Indy Prentice
Date: Mon, 28 Aug 2023 14:02:21 -0300
Subject: [PATCH 03/41] test(lineage): Add test for scroll across lineage (#8728)

Co-authored-by: Indy Prentice
---
 .../metadata/search/LineageSearchService.java |  4 +-
 .../search/LineageSearchServiceTest.java      | 87 +++++++++++++++++++
 .../fixtures/SampleDataFixtureTests.java      |  1 +
 3 files changed, 91 insertions(+), 1 deletion(-)

diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java
index c561ddd38b919..9b8e9bce7e670 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java
@@ -598,7 +598,9 @@ public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull
     List<LineageRelationship> lineageRelationships =
         filterRelationships(lineageResult, new HashSet<>(entities), inputFilters);

-    return getScrollResultInBatches(lineageRelationships, input != null ? input : "*", inputFilters, sortCriterion,
+    Filter reducedFilters =
+        SearchUtils.removeCriteria(inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT));
+    return getScrollResultInBatches(lineageRelationships, input != null ?
input : "*", reducedFilters, sortCriterion, scrollId, keepAlive, size, searchFlags); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchServiceTest.java index 7b2978b747011..faff9f780e31c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchServiceTest.java @@ -317,6 +317,83 @@ public void testSearchService() throws Exception { } + @Test + public void testScrollAcrossLineage() throws Exception { + when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), + anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + LineageScrollResult scrollResult = scrollAcrossLineage(null, TEST1); + assertEquals(scrollResult.getNumEntities().intValue(), 0); + assertNull(scrollResult.getScrollId()); + scrollResult = scrollAcrossLineage(null, TEST1); + assertEquals(scrollResult.getNumEntities().intValue(), 0); + assertNull(scrollResult.getScrollId()); + clearCache(false); + + when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), + anyInt(), eq(null), eq(null))).thenReturn( + mockResult(ImmutableList.of(new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + // just testing null input does not throw any exception + scrollAcrossLineage(null, null); + + scrollResult = scrollAcrossLineage(null, TEST); + assertEquals(scrollResult.getNumEntities().intValue(), 0); + assertNull(scrollResult.getScrollId()); + scrollResult = scrollAcrossLineage(null, TEST1); + assertEquals(scrollResult.getNumEntities().intValue(), 0); + assertNull(scrollResult.getScrollId()); + clearCache(false); + + Urn urn = new TestEntityUrn("test1", "urn1", "VALUE_1"); + ObjectNode document = JsonNodeFactory.instance.objectNode(); + document.set("urn", JsonNodeFactory.instance.textNode(urn.toString())); + document.set("keyPart1", JsonNodeFactory.instance.textNode("test")); + document.set("textFieldOverride", JsonNodeFactory.instance.textNode("textFieldOverride")); + document.set("browsePaths", JsonNodeFactory.instance.textNode("/a/b/c")); + _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); + syncAfterWrite(_bulkProcessor); + + when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), + anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + scrollResult = scrollAcrossLineage(null, TEST1); + assertEquals(scrollResult.getNumEntities().intValue(), 0); + assertEquals(scrollResult.getEntities().size(), 0); + assertNull(scrollResult.getScrollId()); + clearCache(false); + + when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), + anyInt(), eq(null), eq(null))).thenReturn( + mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + scrollResult = scrollAcrossLineage(null, TEST1); + assertEquals(scrollResult.getNumEntities().intValue(), 1); + assertEquals(scrollResult.getEntities().get(0).getEntity(), urn); + assertEquals(scrollResult.getEntities().get(0).getDegree().intValue(), 1); + assertNull(scrollResult.getScrollId()); + + scrollResult = scrollAcrossLineage(QueryUtils.newFilter("degree.keyword", "1"), TEST1); + assertEquals(scrollResult.getNumEntities().intValue(), 1); + 
assertEquals(scrollResult.getEntities().get(0).getEntity(), urn); + assertEquals(scrollResult.getEntities().get(0).getDegree().intValue(), 1); + assertNull(scrollResult.getScrollId()); + + scrollResult = scrollAcrossLineage(QueryUtils.newFilter("degree.keyword", "2"), TEST1); + assertEquals(scrollResult.getNumEntities().intValue(), 0); + assertEquals(scrollResult.getEntities().size(), 0); + assertNull(scrollResult.getScrollId()); + clearCache(false); + + // Cleanup + _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); + syncAfterWrite(_bulkProcessor); + + when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), + anyInt())).thenReturn( + mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + scrollResult = scrollAcrossLineage(null, TEST1); + + assertEquals(scrollResult.getNumEntities().intValue(), 0); + assertNull(scrollResult.getScrollId()); + } + @Test public void testLightningSearchService() throws Exception { // Mostly this test ensures the code path is exercised @@ -731,6 +808,16 @@ private LineageSearchResult searchAcrossLineage(@Nullable Filter filter, @Nullab new SearchFlags().setSkipCache(true)); } + private LineageScrollResult scrollAcrossLineage(@Nullable Filter filter, @Nullable String input, String scrollId, int size) { + return _lineageSearchService.scrollAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), input, + null, filter, null, scrollId, "5m", size, null, null, + new SearchFlags().setSkipCache(true)); + } + + private LineageScrollResult scrollAcrossLineage(@Nullable Filter filter, @Nullable String input) { + return scrollAcrossLineage(filter, input, null, 10); + } + @Test public void testCanDoLightning() throws Exception { Map platformCounts = new HashMap<>(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/fixtures/SampleDataFixtureTests.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/fixtures/SampleDataFixtureTests.java index d989d4ef4fa87..450378b247cea 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/fixtures/SampleDataFixtureTests.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/fixtures/SampleDataFixtureTests.java @@ -161,6 +161,7 @@ public void testSearchFieldConfig() throws IOException { // this is a subfield therefore cannot have a subfield assertFalse(test.hasKeywordSubfield()); assertFalse(test.hasDelimitedSubfield()); + assertFalse(test.hasWordGramSubfields()); String[] fieldAndSubfield = test.fieldName().split("[.]", 2); From 3acd25ba1d2881597e5a0574331b6b81f7375d94 Mon Sep 17 00:00:00 2001 From: Kirill Popov Date: Mon, 28 Aug 2023 20:02:41 +0300 Subject: [PATCH 04/41] feat(ingest/metabase): detect source table for cards sourced from other cards (#8577) --- .../src/datahub/ingestion/source/metabase.py | 87 ++++++-- .../metabase/metabase_mces_golden.json | 70 +++++++ .../integration/metabase/setup/card.json | 194 +++++++++++++++++- .../integration/metabase/setup/card_3.json | 193 +++++++++++++++++ .../integration/metabase/test_metabase.py | 1 + 5 files changed, 531 insertions(+), 14 deletions(-) create mode 100644 metadata-ingestion/tests/integration/metabase/setup/card_3.json diff --git a/metadata-ingestion/src/datahub/ingestion/source/metabase.py b/metadata-ingestion/src/datahub/ingestion/source/metabase.py index 54c5888ee3312..fb4512893feb1 100644 --- 
a/metadata-ingestion/src/datahub/ingestion/source/metabase.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metabase.py @@ -1,6 +1,6 @@ from datetime import datetime, timezone from functools import lru_cache -from typing import Dict, Iterable, List, Optional, Union +from typing import Dict, Iterable, List, Optional, Tuple, Union import dateutil.parser as dp import pydantic @@ -43,6 +43,8 @@ ) from datahub.utilities import config_clean +DATASOURCE_URN_RECURSION_LIMIT = 5 + class MetabaseConfig(DatasetLineageProviderConfigBase): # See the Metabase /api/session endpoint for details @@ -327,18 +329,43 @@ def emit_card_mces(self) -> Iterable[MetadataWorkUnit]: ) return None - def construct_card_from_api_data(self, card_data: dict) -> Optional[ChartSnapshot]: - card_id = card_data.get("id", "") + def get_card_details_by_id(self, card_id: Union[int, str]) -> dict: + """ + Method will attempt to get detailed information on card + from Metabase API by card ID and return this info as dict. + If information can't be retrieved, an empty dict is returned + to unify return value of failed call with successful call of the method. + :param Union[int, str] card_id: ID of card (question) in Metabase + :param int datasource_id: Numeric datasource ID received from Metabase API + :return: dict with info or empty dict + """ card_url = f"{self.config.connect_uri}/api/card/{card_id}" try: card_response = self.session.get(card_url) card_response.raise_for_status() - card_details = card_response.json() + return card_response.json() except HTTPError as http_error: self.report.report_failure( key=f"metabase-card-{card_id}", reason=f"Unable to retrieve Card info. " f"Reason: {str(http_error)}", ) + return {} + + def construct_card_from_api_data(self, card_data: dict) -> Optional[ChartSnapshot]: + card_id = card_data.get("id") + if card_id is None: + self.report.report_failure( + key="metabase-card", + reason=f"Unable to get Card id from card data {str(card_data)}", + ) + return None + + card_details = self.get_card_details_by_id(card_id) + if not card_details: + self.report.report_failure( + key=f"metabase-card-{card_id}", + reason="Unable to construct Card due to empty card details", + ) return None chart_urn = builder.make_chart_urn(self.platform, card_id) @@ -357,7 +384,7 @@ def construct_card_from_api_data(self, card_data: dict) -> Optional[ChartSnapsho lastModified=AuditStamp(time=modified_ts, actor=modified_actor), ) - chart_type = self._get_chart_type(card_id, card_details.get("display")) + chart_type = self._get_chart_type(card_id, card_details.get("display") or "") description = card_details.get("description") or "" title = card_details.get("name") or "" datasource_urn = self.get_datasource_urn(card_details) @@ -448,13 +475,30 @@ def construct_card_custom_properties(self, card_details: dict) -> Dict: return custom_properties - def get_datasource_urn(self, card_details: dict) -> Optional[List]: + def get_datasource_urn( + self, card_details: dict, recursion_depth: int = 0 + ) -> Optional[List]: + if recursion_depth > DATASOURCE_URN_RECURSION_LIMIT: + self.report.report_failure( + key=f"metabase-card-{card_details.get('id')}", + reason="Unable to retrieve Card info. 
Reason: source table recursion depth exceeded", + ) + return None + + datasource_id = card_details.get("database_id") or "" ( platform, database_name, database_schema, platform_instance, - ) = self.get_datasource_from_id(card_details.get("database_id", "")) + ) = self.get_datasource_from_id(datasource_id) + if not platform: + self.report.report_failure( + key=f"metabase-datasource-{datasource_id}", + reason=f"Unable to detect platform for database id {datasource_id}", + ) + return None + query_type = card_details.get("dataset_query", {}).get("type", {}) source_tables = set() @@ -463,8 +507,19 @@ def get_datasource_urn(self, card_details: dict) -> Optional[List]: card_details.get("dataset_query", {}) .get("query", {}) .get("source-table") + or "" ) - if source_table_id is not None: + if str(source_table_id).startswith("card__"): + # question is built not directly from table in DB but from results of other question in Metabase + # trying to get source table from source question. Recursion depth is limited + return self.get_datasource_urn( + card_details=self.get_card_details_by_id( + source_table_id.replace("card__", "") + ), + recursion_depth=recursion_depth + 1, + ) + elif source_table_id != "": + # the question is built directly from table in DB schema_name, table_name = self.get_source_table_from_id(source_table_id) if table_name: source_tables.add( @@ -520,7 +575,9 @@ def get_datasource_urn(self, card_details: dict) -> Optional[List]: return dataset_urn @lru_cache(maxsize=None) - def get_source_table_from_id(self, table_id): + def get_source_table_from_id( + self, table_id: Union[int, str] + ) -> Tuple[Optional[str], Optional[str]]: try: dataset_response = self.session.get( f"{self.config.connect_uri}/api/table/{table_id}" @@ -542,8 +599,8 @@ def get_source_table_from_id(self, table_id): @lru_cache(maxsize=None) def get_platform_instance( - self, platform: Union[str, None] = None, datasource_id: Union[int, None] = None - ) -> Union[str, None]: + self, platform: Optional[str] = None, datasource_id: Optional[int] = None + ) -> Optional[str]: """ Method will attempt to detect `platform_instance` by checking `database_id_to_instance_map` and `platform_instance_map` mappings. @@ -571,7 +628,9 @@ def get_platform_instance( return platform_instance @lru_cache(maxsize=None) - def get_datasource_from_id(self, datasource_id): + def get_datasource_from_id( + self, datasource_id: Union[int, str] + ) -> Tuple[str, Optional[str], Optional[str], Optional[str]]: try: dataset_response = self.session.get( f"{self.config.connect_uri}/api/database/{datasource_id}" @@ -583,7 +642,9 @@ def get_datasource_from_id(self, datasource_id): key=f"metabase-datasource-{datasource_id}", reason=f"Unable to retrieve Datasource. 
" f"Reason: {str(http_error)}", ) - return None, None + # returning empty string as `platform` because + # `make_dataset_urn_with_platform_instance()` only accepts `str` + return "", None, None, None # Map engine names to what datahub expects in # https://github.com/datahub-project/datahub/blob/master/metadata-service/war/src/main/resources/boot/data_platforms.json diff --git a/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json b/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json index 6e57dfaae0ce0..0ba6afbd04fc9 100644 --- a/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json +++ b/metadata-ingestion/tests/integration/metabase/metabase_mces_golden.json @@ -115,6 +115,61 @@ "runId": "metabase-test" } }, +{ + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.ChartSnapshot": { + "urn": "urn:li:chart:(metabase,3)", + "aspects": [ + { + "com.linkedin.pegasus2avro.chart.ChartInfo": { + "customProperties": { + "Metrics": "Distinct values of order_number, Sum of nominal_total", + "Filters": "['time-interval', ['field', 'completed_at', {'base-type': 'type/DateTimeWithTZ'}], -8, 'day', {'include-current': False}]", + "Dimensions": "completed_at" + }, + "title": "Question with data from other question", + "description": "", + "lastModified": { + "created": { + "time": 1685628119636, + "actor": "urn:li:corpuser:john.doe@example.com" + }, + "lastModified": { + "time": 1685628119636, + "actor": "urn:li:corpuser:john.doe@example.com" + } + }, + "chartUrl": "http://localhost:3000/card/3", + "inputs": [ + { + "string": "urn:li:dataset:(urn:li:dataPlatform:bigquery,acryl-data.public.payment,PROD)" + } + ], + "type": "TABLE" + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:admin@metabase.com", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1636614000000, + "runId": "metabase-test" + } +}, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DashboardSnapshot": { @@ -195,6 +250,21 @@ "runId": "metabase-test" } }, +{ + "entityType": "chart", + "entityUrn": "urn:li:chart:(metabase,3)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1636614000000, + "runId": "metabase-test" + } +}, { "entityType": "dashboard", "entityUrn": "urn:li:dashboard:(metabase,1)", diff --git a/metadata-ingestion/tests/integration/metabase/setup/card.json b/metadata-ingestion/tests/integration/metabase/setup/card.json index 439edbf60014f..83bff66e6c9f3 100644 --- a/metadata-ingestion/tests/integration/metabase/setup/card.json +++ b/metadata-ingestion/tests/integration/metabase/setup/card.json @@ -304,4 +304,196 @@ "favorite": false, "created_at": "2021-12-13T17:48:37.102", "public_uuid": null -}] \ No newline at end of file +}, { + "description": null, + "archived": false, + "collection_position": null, + "table_id": null, + "result_metadata": [ + { + "name": "completed_at", + "display_name": "completed_at", + "base_type": "type/Date", + "special_type": null, + "field_ref": [ + "field", + "completed_at", + { + "base-type": "type/DateTimeWithTZ", + "temporal-unit": "day" + } + ], + "unit": "day", + "fingerprint": { + "global": { + "distinct-count": 1916, + "nil%": 0.0385 + } + } + }, + { + "name": "count", + "display_name": "Distinct values of order_number", + 
"base_type": "type/BigInteger", + "special_type": "type/Quantity", + "field_ref": [ + "aggregation", + 0 + ], + "fingerprint": { + "global": { + "distinct-count": 8, + "nil%": 0.0 + }, + "type": { + "type/Number": { + "min": 44098.0, + "q1": 46911.0, + "q3": 51276.0, + "max": 52228.0, + "sd": 2797.3306887357558, + "avg": 48557.125 + } + } + } + }, + { + "name": "sum", + "display_name": "Sum of nominal_total", + "base_type": "type/Float", + "special_type": null, + "field_ref": [ + "aggregation", + 1 + ], + "fingerprint": { + "global": { + "distinct-count": 8, + "nil%": 0.0 + }, + "type": { + "type/Number": { + "min": 1.256807007034278E8, + "q1": 1.277180884245776E8, + "q3": 1.4257821803491282E8, + "max": 1.4887777502074698E8, + "sd": 8966928.163419789, + "avg": 1.3526486656272435E8 + } + } + } + } + ], + "creator": { + "email": "john.doe@example.com", + "first_name": "John", + "last_login": "2023-08-03T09:33:25.157021Z", + "is_qbnewb": false, + "is_superuser": false, + "id": 1, + "last_name": "Doe", + "date_joined": "2020-07-13T07:29:31.805765Z", + "common_name": "John Doe" + }, + "can_write": true, + "database_id": 2, + "enable_embedding": false, + "collection_id": 1135, + "query_type": "query", + "name": "Question with data from other question", + "last_query_start": null, + "dashboard_count": 1, + "average_query_time": null, + "creator_id": 31337, + "moderation_reviews": [], + "updated_at": "2023-06-01T14:01:59.592811Z", + "made_public_by_id": null, + "embedding_params": null, + "cache_ttl": null, + "dataset_query": { + "database": 2, + "query": { + "source-table": "card__1", + "filter": [ + "time-interval", + [ + "field", + "completed_at", + { + "base-type": "type/DateTimeWithTZ" + } + ], + -8, + "day", + { + "include-current": false + } + ], + "aggregation": [ + [ + "distinct", + [ + "field", + "order_number", + { + "base-type": "type/Text" + } + ] + ], + [ + "sum", + [ + "field", + "nominal_total", + { + "base-type": "type/Float" + } + ] + ] + ], + "breakout": [ + [ + "field", + "completed_at", + { + "base-type": "type/DateTimeWithTZ", + "temporal-unit": "day" + } + ] + ] + }, + "type": "query" + }, + "id": 3, + "parameter_mappings": null, + "display": "table", + "entity_id": null, + "collection_preview": true, + "last-edit-info": { + "id": 1, + "email": "john.doe@example.com", + "first_name": "John", + "last_name": "Doe", + "timestamp": "2023-06-01T14:01:59.636581Z" + }, + "visualization_settings": {}, + "collection": { + "authority_level": null, + "description": null, + "archived": false, + "slug": "group", + "color": "#509EE3", + "name": "Group", + "personal_owner_id": null, + "id": 1135, + "entity_id": null, + "location": "/3/373/", + "namespace": null, + "created_at": "2020-07-17T19:28:39.513365Z" + }, + "parameters": null, + "dataset": false, + "created_at": "2020-07-17T19:28:39.513365Z", + "parameter_usage_count": 0, + "public_uuid": null +}] diff --git a/metadata-ingestion/tests/integration/metabase/setup/card_3.json b/metadata-ingestion/tests/integration/metabase/setup/card_3.json new file mode 100644 index 0000000000000..3f928cd2e8f69 --- /dev/null +++ b/metadata-ingestion/tests/integration/metabase/setup/card_3.json @@ -0,0 +1,193 @@ +{ + "description": null, + "archived": false, + "collection_position": null, + "table_id": null, + "result_metadata": [ + { + "name": "completed_at", + "display_name": "completed_at", + "base_type": "type/Date", + "special_type": null, + "field_ref": [ + "field", + "completed_at", + { + "base-type": "type/DateTimeWithTZ", + "temporal-unit": 
"day" + } + ], + "unit": "day", + "fingerprint": { + "global": { + "distinct-count": 1916, + "nil%": 0.0385 + } + } + }, + { + "name": "count", + "display_name": "Distinct values of order_number", + "base_type": "type/BigInteger", + "special_type": "type/Quantity", + "field_ref": [ + "aggregation", + 0 + ], + "fingerprint": { + "global": { + "distinct-count": 8, + "nil%": 0.0 + }, + "type": { + "type/Number": { + "min": 44098.0, + "q1": 46911.0, + "q3": 51276.0, + "max": 52228.0, + "sd": 2797.3306887357558, + "avg": 48557.125 + } + } + } + }, + { + "name": "sum", + "display_name": "Sum of nominal_total", + "base_type": "type/Float", + "special_type": null, + "field_ref": [ + "aggregation", + 1 + ], + "fingerprint": { + "global": { + "distinct-count": 8, + "nil%": 0.0 + }, + "type": { + "type/Number": { + "min": 1.256807007034278E8, + "q1": 1.277180884245776E8, + "q3": 1.4257821803491282E8, + "max": 1.4887777502074698E8, + "sd": 8966928.163419789, + "avg": 1.3526486656272435E8 + } + } + } + } + ], + "creator": { + "email": "john.doe@example.com", + "first_name": "John", + "last_login": "2023-08-03T09:33:25.157021Z", + "is_qbnewb": false, + "is_superuser": false, + "id": 1, + "last_name": "Doe", + "date_joined": "2020-07-13T07:29:31.805765Z", + "common_name": "John Doe" + }, + "can_write": true, + "database_id": 2, + "enable_embedding": false, + "collection_id": 1135, + "query_type": "query", + "name": "Question with data from other question", + "last_query_start": null, + "dashboard_count": 1, + "average_query_time": null, + "creator_id": 1, + "moderation_reviews": [], + "updated_at": "2023-06-01T14:01:59.592811Z", + "made_public_by_id": null, + "embedding_params": null, + "cache_ttl": null, + "dataset_query": { + "database": 2, + "query": { + "source-table": "card__1", + "filter": [ + "time-interval", + [ + "field", + "completed_at", + { + "base-type": "type/DateTimeWithTZ" + } + ], + -8, + "day", + { + "include-current": false + } + ], + "aggregation": [ + [ + "distinct", + [ + "field", + "order_number", + { + "base-type": "type/Text" + } + ] + ], + [ + "sum", + [ + "field", + "nominal_total", + { + "base-type": "type/Float" + } + ] + ] + ], + "breakout": [ + [ + "field", + "completed_at", + { + "base-type": "type/DateTimeWithTZ", + "temporal-unit": "day" + } + ] + ] + }, + "type": "query" + }, + "id": 3, + "parameter_mappings": null, + "display": "table", + "entity_id": null, + "collection_preview": true, + "last-edit-info": { + "id": 1, + "email": "john.doe@example.com", + "first_name": "John", + "last_name": "Doe", + "timestamp": "2023-06-01T14:01:59.636581Z" + }, + "visualization_settings": {}, + "collection": { + "authority_level": null, + "description": null, + "archived": false, + "slug": "group", + "color": "#509EE3", + "name": "Group", + "personal_owner_id": null, + "id": 1135, + "entity_id": null, + "location": "/3/373/", + "namespace": null, + "created_at": "2020-07-17T19:28:39.513365Z" + }, + "parameters": null, + "dataset": false, + "created_at": "2020-07-17T19:28:39.513365Z", + "parameter_usage_count": 0, + "public_uuid": null +} diff --git a/metadata-ingestion/tests/integration/metabase/test_metabase.py b/metadata-ingestion/tests/integration/metabase/test_metabase.py index 5f5c8efedbfeb..24d254fc8469e 100644 --- a/metadata-ingestion/tests/integration/metabase/test_metabase.py +++ b/metadata-ingestion/tests/integration/metabase/test_metabase.py @@ -23,6 +23,7 @@ "http://localhost:3000/api/card/1": "card_1.json", "http://localhost:3000/api/card/2": "card_2.json", 
"http://localhost:3000/api/table/21": "table_21.json", + "http://localhost:3000/api/card/3": "card_3.json", } RESPONSE_ERROR_LIST = ["http://localhost:3000/api/dashboard"] From 437b7877479bb9fe7b63fa314cae16a9ef3af280 Mon Sep 17 00:00:00 2001 From: Jinlin Yang <86577891+jinlintt@users.noreply.github.com> Date: Mon, 28 Aug 2023 18:17:07 -0500 Subject: [PATCH 05/41] (ingestion) bug fix: emit platform instance aspect for dataset in Databricks ingestion (#8671) --- .../datahub/ingestion/source/unity/config.py | 5 +++++ .../datahub/ingestion/source/unity/source.py | 17 +++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index cef4b763fea57..94ff755e3b254 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -97,6 +97,11 @@ class UnityCatalogSourceConfig( description="Name of the workspace. Default to deployment name present in workspace_url", ) + ingest_data_platform_instance_aspect: Optional[bool] = pydantic.Field( + default=False, + description="Option to enable/disable ingestion of the data platform instance aspect. The default data platform instance id for a dataset is workspace_name", + ) + _only_ingest_assigned_metastore_removed = pydantic_removed_field( "only_ingest_assigned_metastore" ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 69e1cac79380d..493acb939c3bb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -7,6 +7,7 @@ from datahub.emitter.mce_builder import ( make_data_platform_urn, + make_dataplatform_instance_urn, make_dataset_urn_with_platform_instance, make_domain_urn, make_schema_field_urn, @@ -68,6 +69,7 @@ ViewProperties, ) from datahub.metadata.schema_classes import ( + DataPlatformInstanceClass, DatasetLineageTypeClass, DatasetPropertiesClass, DomainsClass, @@ -278,6 +280,7 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn operation = self._create_table_operation_aspect(table) domain = self._get_domain_aspect(dataset_name=table.ref.qualified_table_name) ownership = self._create_table_ownership_aspect(table) + data_platform_instance = self._create_data_platform_instance_aspect(table) lineage: Optional[UpstreamLineageClass] = None if self.config.include_column_lineage: @@ -299,6 +302,7 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn operation, domain, ownership, + data_platform_instance, lineage, ], ) @@ -558,6 +562,19 @@ def _create_table_ownership_aspect(self, table: Table) -> Optional[OwnershipClas ) return None + def _create_data_platform_instance_aspect( + self, table: Table + ) -> Optional[DataPlatformInstanceClass]: + # Only ingest the DPI aspect if the flag is true + if self.config.ingest_data_platform_instance_aspect: + return DataPlatformInstanceClass( + platform=make_data_platform_urn(self.platform), + instance=make_dataplatform_instance_urn( + self.platform, self.platform_instance_name + ), + ) + return None + def _create_table_sub_type_aspect(self, table: Table) -> SubTypesClass: return SubTypesClass( typeNames=[DatasetSubTypes.VIEW if table.is_view else DatasetSubTypes.TABLE] From 7b0ebe637c2491688dcbeb588a9ba3180ebaff71 Mon Sep 17 00:00:00 2001 From: Indy 
Prentice Date: Tue, 29 Aug 2023 13:07:13 -0300 Subject: [PATCH 06/41] feat(config): Turn on new search & browse experience by default (#8737) Co-authored-by: Indy Prentice --- .../configuration/src/main/resources/application.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index d21442d0bf5c8..f49498bfa2325 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -294,8 +294,8 @@ featureFlags: alwaysEmitChangeLog: ${ALWAYS_EMIT_CHANGE_LOG:false} # Enables always emitting a MCL even when no changes are detected. Used for Time Based Lineage when no changes occur. searchServiceDiffModeEnabled: ${SEARCH_SERVICE_DIFF_MODE_ENABLED:true} # Enables diff mode for search document writes, reduces amount of writes to ElasticSearch documents for no-ops readOnlyModeEnabled: ${READ_ONLY_MODE_ENABLED:false} # Enables read only mode for an instance. Right now this only affects ability to edit user profile image URL but can be extended - showSearchFiltersV2: ${SHOW_SEARCH_FILTERS_V2:false} # Enables showing the search filters V2 experience. - showBrowseV2: ${SHOW_BROWSE_V2:false} # Enables showing the browse v2 sidebar experience. + showSearchFiltersV2: ${SHOW_SEARCH_FILTERS_V2:true} # Enables showing the search filters V2 experience. + showBrowseV2: ${SHOW_BROWSE_V2:true} # Enables showing the browse v2 sidebar experience. preProcessHooks: uiEnabled: ${PRE_PROCESS_HOOKS_UI_ENABLED:true} # Circumvents Kafka for processing index updates for UI changes sourced from GraphQL to avoid processing delays showAcrylInfo: ${SHOW_ACRYL_INFO:false} # Show different CTAs within DataHub around moving to Managed DataHub. Set to true for the demo site. From d86b336e70f564917e8e4d8bb45277389d1b14e9 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Tue, 29 Aug 2023 18:11:37 +0200 Subject: [PATCH 07/41] chore(ingest/s3) Bump Deequ and Pyspark version (#8638) Co-authored-by: Andrew Sikowitz --- .github/workflows/metadata-ingestion.yml | 2 +- metadata-ingestion/docs/sources/s3/s3.md | 6 + metadata-ingestion/setup.py | 4 +- .../src/datahub/ingestion/source/s3/source.py | 9 +- .../local/golden_mces_multiple_files.json | 126 ++++++++++-------- .../golden_mces_multiple_spec_for_files.json | 126 ++++++++++-------- ...s_multiple_specs_of_different_buckets.json | 126 ++++++++++-------- .../local/golden_mces_single_file.json | 126 ++++++++++-------- .../s3/golden_mces_multiple_files.json | 32 ++--- .../golden_mces_multiple_spec_for_files.json | 32 ++--- ...s_multiple_specs_of_different_buckets.json | 32 ++--- .../s3/golden_mces_single_file.json | 32 ++--- .../folder_aaa/chord_progressions_avro.avro | Bin 1024 -> 619 bytes .../tests/integration/s3/test_s3.py | 2 +- 14 files changed, 363 insertions(+), 292 deletions(-) diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index 23d7ee9427f42..fb70c85fdec93 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -25,7 +25,7 @@ jobs: metadata-ingestion: runs-on: ubuntu-latest env: - SPARK_VERSION: 3.0.3 + SPARK_VERSION: 3.3.2 DATAHUB_TELEMETRY_ENABLED: false # TODO: Enable this once the test is fixed. 
# DATAHUB_LOOKML_GIT_TEST_SSH_KEY: ${{ secrets.DATAHUB_LOOKML_GIT_TEST_SSH_KEY }} diff --git a/metadata-ingestion/docs/sources/s3/s3.md b/metadata-ingestion/docs/sources/s3/s3.md index 93715629d0b8e..9484cd8de6666 100644 --- a/metadata-ingestion/docs/sources/s3/s3.md +++ b/metadata-ingestion/docs/sources/s3/s3.md @@ -196,3 +196,9 @@ If you are ingesting datasets from AWS S3, we recommend running the ingestion on Profiles are computed with PyDeequ, which relies on PySpark. Therefore, for computing profiles, we currently require Spark 3.0.3 with Hadoop 3.2 to be installed and the `SPARK_HOME` and `SPARK_VERSION` environment variables to be set. The Spark+Hadoop binary can be downloaded [here](https://www.apache.org/dyn/closer.lua/spark/spark-3.0.3/spark-3.0.3-bin-hadoop3.2.tgz). For an example guide on setting up PyDeequ on AWS, see [this guide](https://aws.amazon.com/blogs/big-data/testing-data-quality-at-scale-with-pydeequ/). + +:::caution + +From Spark 3.2.0+, Avro reader fails on column names that don't start with a letter and contains other character than letters, number, and underscore. [https://github.com/apache/spark/blob/72c62b6596d21e975c5597f8fff84b1a9d070a02/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroFileFormat.scala#L158] +Avro files that contain such columns won't be profiled. +::: \ No newline at end of file diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index ded9186e08a22..9195dab7bf5b7 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -247,8 +247,8 @@ def get_long_description(): } data_lake_profiling = { - "pydeequ>=1.0.1, <1.1", - "pyspark==3.0.3", + "pydeequ==1.1.0", + "pyspark~=3.3.0", } delta_lake = { diff --git a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py index 4247ee9330cfb..ab5d3a4e007ac 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py @@ -261,13 +261,14 @@ def init_spark(self): import pydeequ conf = SparkConf() - + spark_version = os.getenv("SPARK_VERSION", "3.3") conf.set( "spark.jars.packages", ",".join( [ "org.apache.hadoop:hadoop-aws:3.0.3", - "org.apache.spark:spark-avro_2.12:3.0.3", + # Spark's avro version needs to be matched with the Spark version + f"org.apache.spark:spark-avro_2.12:{spark_version}{'.0' if spark_version.count('.') == 1 else ''}", pydeequ.deequ_maven_coord, ] ), @@ -374,10 +375,10 @@ def read_file_spark(self, file: str, ext: str) -> Optional[DataFrame]: elif ext.endswith(".avro"): try: df = self.spark.read.format("avro").load(file) - except AnalysisException: + except AnalysisException as e: self.report.report_warning( file, - "To ingest avro files, please install the spark-avro package: https://mvnrepository.com/artifact/org.apache.spark/spark-avro_2.12/3.0.3", + f"Avro file reading failed with exception. 
The error was: {e}", ) return None diff --git a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_files.json b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_files.json index ceec764bfbc86..d59fce788c95e 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_files.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_files.json @@ -2782,7 +2782,7 @@ "customProperties": { "schema_inferred_from": "tests/integration/s3/test_data/local_system/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -2820,62 +2820,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + "nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } @@ -2939,7 +2939,58 @@ "columnCount": 5, "fieldProfiles": [ { - "fieldPath": "1st chord", + "fieldPath": "FirstChord", + "uniqueCount": 5, + "uniqueProportion": 0.17857142857142858, + "nullCount": 0, + "nullProportion": 0.0, + "distinctValueFrequencies": [ + { + "value": "1", + "frequency": 19 + }, + { + "value": "2", + "frequency": 3 + }, + { + "value": "4", + "frequency": 2 + }, + { + "value": "5", + "frequency": 1 + }, + { + "value": "6", + "frequency": 3 + } + ], + "sampleValues": [ + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "2", + "4", + "5", + "6", + "6", + "6" + ] + }, + { + "fieldPath": "SecondChord", "uniqueCount": 5, "uniqueProportion": 0.17857142857142858, "nullCount": 0, @@ -2990,7 +3041,7 @@ ] }, { - "fieldPath": "2nd chord", + "fieldPath": "ThirdChord", "uniqueCount": 7, "uniqueProportion": 0.25, "nullCount": 0, @@ -3049,7 +3100,7 @@ ] }, { - "fieldPath": "3rd chord", + "fieldPath": 
"FourthChord", "uniqueCount": 6, "uniqueProportion": 0.21428571428571427, "nullCount": 0, @@ -3104,7 +3155,7 @@ ] }, { - "fieldPath": "4th chord", + "fieldPath": "ProgressionQuality", "uniqueCount": 20, "uniqueProportion": 0.7142857142857143, "nullCount": 0, @@ -3213,41 +3264,6 @@ "Sweet", "Wistful" ] - }, - { - "fieldPath": "Progression Quality", - "uniqueCount": 1, - "uniqueProportion": 0.03571428571428571, - "nullCount": 0, - "nullProportion": 0.0, - "distinctValueFrequencies": [ - { - "value": "NaN", - "frequency": 28 - } - ], - "sampleValues": [ - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan" - ] } ] } diff --git a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_spec_for_files.json b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_spec_for_files.json index 1bd75ae457cb4..ed2c992655a89 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_spec_for_files.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_spec_for_files.json @@ -9,7 +9,7 @@ "customProperties": { "schema_inferred_from": "tests/integration/s3/test_data/local_system/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -47,62 +47,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + "nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } @@ -1046,7 +1046,58 @@ "columnCount": 5, "fieldProfiles": [ { - "fieldPath": "1st chord", + "fieldPath": "FirstChord", + "uniqueCount": 5, + "uniqueProportion": 0.17857142857142858, + "nullCount": 0, + "nullProportion": 0.0, + 
"distinctValueFrequencies": [ + { + "value": "1", + "frequency": 19 + }, + { + "value": "2", + "frequency": 3 + }, + { + "value": "4", + "frequency": 2 + }, + { + "value": "5", + "frequency": 1 + }, + { + "value": "6", + "frequency": 3 + } + ], + "sampleValues": [ + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "2", + "4", + "5", + "6", + "6", + "6" + ] + }, + { + "fieldPath": "SecondChord", "uniqueCount": 5, "uniqueProportion": 0.17857142857142858, "nullCount": 0, @@ -1097,7 +1148,7 @@ ] }, { - "fieldPath": "2nd chord", + "fieldPath": "ThirdChord", "uniqueCount": 7, "uniqueProportion": 0.25, "nullCount": 0, @@ -1156,7 +1207,7 @@ ] }, { - "fieldPath": "3rd chord", + "fieldPath": "FourthChord", "uniqueCount": 6, "uniqueProportion": 0.21428571428571427, "nullCount": 0, @@ -1211,7 +1262,7 @@ ] }, { - "fieldPath": "4th chord", + "fieldPath": "ProgressionQuality", "uniqueCount": 20, "uniqueProportion": 0.7142857142857143, "nullCount": 0, @@ -1320,41 +1371,6 @@ "Sweet", "Wistful" ] - }, - { - "fieldPath": "Progression Quality", - "uniqueCount": 1, - "uniqueProportion": 0.03571428571428571, - "nullCount": 0, - "nullProportion": 0.0, - "distinctValueFrequencies": [ - { - "value": "NaN", - "frequency": 28 - } - ], - "sampleValues": [ - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan" - ] } ] } diff --git a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_specs_of_different_buckets.json b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_specs_of_different_buckets.json index b9687b97571cb..f7793140fe033 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_specs_of_different_buckets.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_multiple_specs_of_different_buckets.json @@ -9,7 +9,7 @@ "customProperties": { "schema_inferred_from": "tests/integration/s3/test_data/local_system/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -47,62 +47,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": 
"[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + "nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } @@ -1046,7 +1046,58 @@ "columnCount": 5, "fieldProfiles": [ { - "fieldPath": "1st chord", + "fieldPath": "FirstChord", + "uniqueCount": 5, + "uniqueProportion": 0.17857142857142858, + "nullCount": 0, + "nullProportion": 0.0, + "distinctValueFrequencies": [ + { + "value": "1", + "frequency": 19 + }, + { + "value": "2", + "frequency": 3 + }, + { + "value": "4", + "frequency": 2 + }, + { + "value": "5", + "frequency": 1 + }, + { + "value": "6", + "frequency": 3 + } + ], + "sampleValues": [ + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "2", + "4", + "5", + "6", + "6", + "6" + ] + }, + { + "fieldPath": "SecondChord", "uniqueCount": 5, "uniqueProportion": 0.17857142857142858, "nullCount": 0, @@ -1097,7 +1148,7 @@ ] }, { - "fieldPath": "2nd chord", + "fieldPath": "ThirdChord", "uniqueCount": 7, "uniqueProportion": 0.25, "nullCount": 0, @@ -1156,7 +1207,7 @@ ] }, { - "fieldPath": "3rd chord", + "fieldPath": "FourthChord", "uniqueCount": 6, "uniqueProportion": 0.21428571428571427, "nullCount": 0, @@ -1211,7 +1262,7 @@ ] }, { - "fieldPath": "4th chord", + "fieldPath": "ProgressionQuality", "uniqueCount": 20, "uniqueProportion": 0.7142857142857143, "nullCount": 0, @@ -1320,41 +1371,6 @@ "Sweet", "Wistful" ] - }, - { - "fieldPath": "Progression Quality", - "uniqueCount": 1, - "uniqueProportion": 0.03571428571428571, - "nullCount": 0, - "nullProportion": 0.0, - "distinctValueFrequencies": [ - { - "value": "NaN", - "frequency": 28 - } - ], - "sampleValues": [ - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan" - ] } ] } diff --git a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json index a5a68777cad5c..f54c62865bcde 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json @@ -9,7 +9,7 @@ "customProperties": { "schema_inferred_from": "tests/integration/s3/test_data/local_system/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -47,62 +47,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": 
"[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + "nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } @@ -1046,7 +1046,58 @@ "columnCount": 5, "fieldProfiles": [ { - "fieldPath": "1st chord", + "fieldPath": "FirstChord", + "uniqueCount": 5, + "uniqueProportion": 0.17857142857142858, + "nullCount": 0, + "nullProportion": 0.0, + "distinctValueFrequencies": [ + { + "value": "1", + "frequency": 19 + }, + { + "value": "2", + "frequency": 3 + }, + { + "value": "4", + "frequency": 2 + }, + { + "value": "5", + "frequency": 1 + }, + { + "value": "6", + "frequency": 3 + } + ], + "sampleValues": [ + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "1", + "2", + "4", + "5", + "6", + "6", + "6" + ] + }, + { + "fieldPath": "SecondChord", "uniqueCount": 5, "uniqueProportion": 0.17857142857142858, "nullCount": 0, @@ -1097,7 +1148,7 @@ ] }, { - "fieldPath": "2nd chord", + "fieldPath": "ThirdChord", "uniqueCount": 7, "uniqueProportion": 0.25, "nullCount": 0, @@ -1156,7 +1207,7 @@ ] }, { - "fieldPath": "3rd chord", + "fieldPath": "FourthChord", "uniqueCount": 6, "uniqueProportion": 0.21428571428571427, "nullCount": 0, @@ -1211,7 +1262,7 @@ ] }, { - "fieldPath": "4th chord", + "fieldPath": "ProgressionQuality", "uniqueCount": 20, "uniqueProportion": 0.7142857142857143, "nullCount": 0, @@ -1320,41 +1371,6 @@ "Sweet", "Wistful" ] - }, - { - "fieldPath": "Progression Quality", - "uniqueCount": 1, - "uniqueProportion": 0.03571428571428571, - "nullCount": 0, - "nullProportion": 0.0, - "distinctValueFrequencies": [ - { - "value": "NaN", - "frequency": 28 - } - ], - "sampleValues": [ - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan", - "nan" - ] } ] } diff --git a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_files.json b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_files.json index 36d3ba1b3510d..58c225e1ec4c9 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_files.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_files.json @@ -949,7 +949,7 @@ "customProperties": { "schema_inferred_from": 
"s3://my-test-bucket/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -1003,62 +1003,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + "nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } diff --git a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_spec_for_files.json b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_spec_for_files.json index 84ace7d673676..9c41bbdc80c49 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_spec_for_files.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_spec_for_files.json @@ -9,7 +9,7 @@ "customProperties": { "schema_inferred_from": "s3://my-test-bucket/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -47,62 +47,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": 
"[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + "nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } diff --git a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_specs_of_different_buckets.json b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_specs_of_different_buckets.json index f7f3cb8fb743e..985140f774ab4 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_specs_of_different_buckets.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_multiple_specs_of_different_buckets.json @@ -9,7 +9,7 @@ "customProperties": { "schema_inferred_from": "s3://my-test-bucket/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -47,62 +47,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + 
"nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } diff --git a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_single_file.json b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_single_file.json index 5353d95ada8f7..5d87d423a6a67 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_single_file.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/s3/golden_mces_single_file.json @@ -9,7 +9,7 @@ "customProperties": { "schema_inferred_from": "s3://my-test-bucket/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro", "number_of_files": "1", - "size_in_bytes": "1024" + "size_in_bytes": "619" }, "name": "chord_progressions_avro.avro", "description": "", @@ -47,62 +47,62 @@ }, "fields": [ { - "fieldPath": "[version=2.0].[type=Root].[type=double].Progression Quality", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FirstChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "double", + "nativeDataType": "FirstChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].1st chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].FourthChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "FourthChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].2nd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].SecondChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "SecondChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=long].3rd chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=long].ThirdChord", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "long", + "nativeDataType": "ThirdChord", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "[version=2.0].[type=Root].[type=string].4th chord", - "nullable": true, + "fieldPath": "[version=2.0].[type=Record].[type=string].ProgressionQuality", + "nullable": false, "type": { "type": { "com.linkedin.schema.StringType": {} } }, - "nativeDataType": "string", + "nativeDataType": "ProgressionQuality", "recursive": false, "isPartOfKey": false } diff --git a/metadata-ingestion/tests/integration/s3/test_data/local_system/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro b/metadata-ingestion/tests/integration/s3/test_data/local_system/folder_a/folder_aa/folder_aaa/chord_progressions_avro.avro index 8a6d9df66bb79615abbee50ea71d92d5043134c4..79c329b3f8dca4fafd677999cd691b52d9839d81 100644 GIT binary patch literal 619 zcmah`!A=}83~k-f$cm!Ts#@^@b4EQ^d#iw{S{0Qtl{i#Ago&96WaCwm*=j-jMz8&i z-uMVEd`AC(dSO9oFPn?~lK1Rq=kw|L%VNhYBfm-AKy!2V>Z;DG1=X`UQpeHOyLB-0 zH9mgAXO6s0_3Yc#(^KPO%6r2jeIxfA`uckJnbV3q*e#^5gNxfZ4UaJoTM8XT?jxn- zID8q6cX=g)Y@DSW-P+st>*>Sf9b4)!jNZSvH6MO9H?!aO?fnh@iu&!JLL;c1cRo`L zCc7n}V7+H@H>^O;bl=&YqO;7PQBC$E8GAN*(8WIhtAk*@ID!)O>w`eCr|ddVfY`9V z0KG}H!gA7h*?H(#E3KXZV5#-{IEJ1ICCxsX%S%9o_=%SUVrz0pbB>OZIK=XZ7of{( z9Z_i0V>%`89gj(nDqCYgv=32=tfN8z)&3yLQVn(M<_k}tq}L{u+sTQO&v%wU7}beO VS%xh@RqBx0t_rC1WA^_l{{n-1wb1|o literal 1024 zcmb7DO>5gg5KR|jgb>0M6LJ(fhd`Sin%+vjAS5KQ4>^P!*4l}@?Rv+oR%x9OdhRWU 
zmi(FghTxwNdg>48p~ucDrYqTPAiC{)^WMyxT^$ZSyl8d#g~7E91!RFPm0GUOMK**H z|IxZi?PLt;yCgM`p%Lk7l4?GxkRGCS=W_{46jXD1^_P29*d!ay1T%}%)cHGwLY=2) z*Q<#ayw>7*f_3vu!5ce#qC;VgTo~%eU~Zr&Bz;d8~5}jQy-QE6N z-<^K{_VD@s*L3CU#@m~a1&pzfEx%V5jOJEe09&fQ-(O~2w8?EYoOqZN^mVtS$iYZ~ z=&6V_A?s`z;NrYgb)xluB4F{h7K+k%L7SW+_HtW;$-&CZGa)q(Sg?cYqX(mZfi7K} zb#@fJ3Z2!)49S|tTqIn5#G+kO>b$lH*OH>`@yko!(j}!i_Hl7#odX!5kx Date: Tue, 29 Aug 2023 12:32:27 -0400 Subject: [PATCH 08/41] docs(ingest/openapi): Downgrade status from CERTIFIED to INCUBATING (#8736) --- metadata-ingestion/src/datahub/ingestion/source/openapi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi.py b/metadata-ingestion/src/datahub/ingestion/source/openapi.py index 42924a09a39e9..3925ba51c16dd 100755 --- a/metadata-ingestion/src/datahub/ingestion/source/openapi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/openapi.py @@ -108,7 +108,7 @@ class ApiWorkUnit(MetadataWorkUnit): @platform_name("OpenAPI", id="openapi") @config_class(OpenApiConfig) -@support_status(SupportStatus.CERTIFIED) +@support_status(SupportStatus.INCUBATING) @capability(SourceCapability.PLATFORM_INSTANCE, supported=False, description="") class APISource(Source, ABC): """ From 4539a1cf207a51db71aba9f1e88a362444165974 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 29 Aug 2023 09:32:35 -0700 Subject: [PATCH 09/41] feat(health): Adding Entity Health Status to the Lineage Graph View (#8739) --- .../src/app/entity/dataset/DatasetEntity.tsx | 1 + .../profile/header/EntityHealth.tsx | 10 ++++---- .../profile/header/EntityHealthPopover.tsx | 8 ++++--- .../src/app/lineage/LineageEntityNode.tsx | 18 +++++++++++++- .../src/app/lineage/constants.ts | 2 ++ datahub-web-react/src/app/lineage/types.ts | 3 +++ .../app/lineage/utils/constructFetchedNode.ts | 1 + .../src/app/lineage/utils/constructTree.ts | 1 + .../src/app/shared/health/healthUtils.tsx | 24 ++++++++++++------- datahub-web-react/src/graphql/lineage.graphql | 6 +++++ 10 files changed, 58 insertions(+), 16 deletions(-) diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index ed3904bcf4e2d..535a3f569964c 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -317,6 +317,7 @@ export class DatasetEntity implements Entity { subtype: entity?.subTypes?.typeNames?.[0] || undefined, icon: entity?.platform?.properties?.logoUrl || undefined, platform: entity?.platform, + health: entity?.health || undefined, }; }; diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealth.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealth.tsx index baef67a3d1c88..30713afa888b8 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealth.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealth.tsx @@ -2,7 +2,7 @@ import React from 'react'; import styled from 'styled-components'; import { Link } from 'react-router-dom'; import { Health } from '../../../../../../types.generated'; -import { getHealthSummaryIcon, isUnhealthy } from '../../../../../shared/health/healthUtils'; +import { getHealthSummaryIcon, HealthSummaryIconType, isUnhealthy } from '../../../../../shared/health/healthUtils'; import { EntityHealthPopover } from './EntityHealthPopover'; const Container = styled.div` @@ -14,17 +14,19 @@ const 
Container = styled.div` type Props = { health: Health[]; baseUrl: string; + fontSize?: number; + tooltipPlacement?: any; }; -export const EntityHealth = ({ health, baseUrl }: Props) => { +export const EntityHealth = ({ health, baseUrl, fontSize, tooltipPlacement }: Props) => { const unhealthy = isUnhealthy(health); - const icon = getHealthSummaryIcon(health); + const icon = getHealthSummaryIcon(health, HealthSummaryIconType.FILLED, fontSize); return ( <> {(unhealthy && ( - + {icon} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealthPopover.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealthPopover.tsx index 0d327a54a62d1..4dde3ffcbb6a4 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealthPopover.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHealthPopover.tsx @@ -50,10 +50,12 @@ type Props = { health: Health[]; baseUrl: string; children: React.ReactNode; + fontSize?: number; + placement?: any; }; -export const EntityHealthPopover = ({ health, baseUrl, children }: Props) => { - const icon = getHealthSummaryIcon(health, HealthSummaryIconType.OUTLINED); +export const EntityHealthPopover = ({ health, baseUrl, children, fontSize, placement = 'right' }: Props) => { + const icon = getHealthSummaryIcon(health, HealthSummaryIconType.OUTLINED, fontSize); const message = getHealthSummaryMessage(health); return ( { } color="#262626" - placement="right" + placement={placement} zIndex={10000000} > {children} diff --git a/datahub-web-react/src/app/lineage/LineageEntityNode.tsx b/datahub-web-react/src/app/lineage/LineageEntityNode.tsx index 4526e3a225ce2..f5be1d57db070 100644 --- a/datahub-web-react/src/app/lineage/LineageEntityNode.tsx +++ b/datahub-web-react/src/app/lineage/LineageEntityNode.tsx @@ -12,11 +12,12 @@ import { getShortenedTitle, nodeHeightFromTitleLength } from './utils/titleUtils import { LineageExplorerContext } from './utils/LineageExplorerContext'; import { useGetEntityLineageLazyQuery } from '../../graphql/lineage.generated'; import { useIsSeparateSiblingsMode } from '../entity/shared/siblingUtils'; -import { centerX, centerY, iconHeight, iconWidth, iconX, iconY, textX, width } from './constants'; +import { centerX, centerY, iconHeight, iconWidth, iconX, iconY, textX, width, healthX, healthY } from './constants'; import LineageEntityColumns from './LineageEntityColumns'; import { convertInputFieldsToSchemaFields } from './utils/columnLineageUtils'; import ManageLineageMenu from './manage/ManageLineageMenu'; import { useGetLineageTimeParams } from './utils/useGetLineageTimeParams'; +import { EntityHealth } from '../entity/shared/containers/profile/header/EntityHealth'; const CLICK_DELAY_THRESHOLD = 1000; const DRAG_DISTANCE_THRESHOLD = 20; @@ -136,6 +137,11 @@ export default function LineageEntityNode({ capitalizeFirstLetterOnly(node.data.subtype) || (node.data.type && entityRegistry.getEntityName(node.data.type)); + // Health + const { health } = node.data; + const baseUrl = node.data.type && node.data.urn && entityRegistry.getEntityUrl(node.data.type, node.data.urn); + const hasHealth = (health && baseUrl) || false; + return ( {unexploredHiddenChildren && (isHovered || isSelected) ? ( @@ -359,6 +365,16 @@ export default function LineageEntityNode({ {getShortenedTitle(node.data.name, width)} )} + + {hasHealth && ( + + )} + {unexploredHiddenChildren && isHovered ? 
( ; downstreamRelationships?: Array; + health?: Health[]; }; export type VizNode = { diff --git a/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts b/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts index 143b226bda687..778d0e325f7cb 100644 --- a/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts +++ b/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts @@ -67,6 +67,7 @@ export default function constructFetchedNode( canEditLineage: fetchedNode.canEditLineage, upstreamRelationships: fetchedNode?.upstreamRelationships || [], downstreamRelationships: fetchedNode?.downstreamRelationships || [], + health: fetchedNode?.health, }; // eslint-disable-next-line no-param-reassign diff --git a/datahub-web-react/src/app/lineage/utils/constructTree.ts b/datahub-web-react/src/app/lineage/utils/constructTree.ts index 8374509ad74eb..7da6fc56b57bd 100644 --- a/datahub-web-react/src/app/lineage/utils/constructTree.ts +++ b/datahub-web-react/src/app/lineage/utils/constructTree.ts @@ -100,6 +100,7 @@ export default function constructTree( canEditLineage: fetchedEntity?.canEditLineage, upstreamRelationships: fetchedEntity?.upstreamRelationships || [], downstreamRelationships: fetchedEntity?.downstreamRelationships || [], + health: fetchedEntity?.health, }; const lineageConfig = entityRegistry.getLineageVizConfig(entityAndType.type, entityAndType.entity); let updatedLineageConfig = { ...lineageConfig }; diff --git a/datahub-web-react/src/app/shared/health/healthUtils.tsx b/datahub-web-react/src/app/shared/health/healthUtils.tsx index 823d77d7eabe9..ff7d9b417617c 100644 --- a/datahub-web-react/src/app/shared/health/healthUtils.tsx +++ b/datahub-web-react/src/app/shared/health/healthUtils.tsx @@ -11,13 +11,17 @@ import { HealthStatus, HealthStatusType, Health } from '../../../types.generated const HEALTH_INDICATOR_COLOR = '#d48806'; -const UnhealthyIconFilled = styled(ExclamationCircleTwoTone)` - font-size: 16px; +const UnhealthyIconFilled = styled(ExclamationCircleTwoTone)<{ fontSize: number }>` + && { + font-size: ${(props) => props.fontSize}px; + } `; -const UnhealthyIconOutlined = styled(ExclamationCircleOutlined)` +const UnhealthyIconOutlined = styled(ExclamationCircleOutlined)<{ fontSize: number }>` color: ${HEALTH_INDICATOR_COLOR}; - font-size: 16px; + && { + font-size: ${(props) => props.fontSize}px; + } `; export enum HealthSummaryIconType { @@ -32,12 +36,16 @@ export const isUnhealthy = (healths: Health[]) => { return isFailingAssertions; }; -export const getHealthSummaryIcon = (healths: Health[], type: HealthSummaryIconType = HealthSummaryIconType.FILLED) => { +export const getHealthSummaryIcon = ( + healths: Health[], + type: HealthSummaryIconType = HealthSummaryIconType.FILLED, + fontSize = 16, +) => { const unhealthy = isUnhealthy(healths); return unhealthy - ? (type === HealthSummaryIconType.FILLED && ) || ( - - ) + ? (type === HealthSummaryIconType.FILLED && ( + + )) || : undefined; }; diff --git a/datahub-web-react/src/graphql/lineage.graphql b/datahub-web-react/src/graphql/lineage.graphql index 61c79abf929a0..52385dee8631a 100644 --- a/datahub-web-react/src/graphql/lineage.graphql +++ b/datahub-web-react/src/graphql/lineage.graphql @@ -198,6 +198,12 @@ fragment lineageNodeProperties on EntityWithRelationships { path } } + health { + type + status + message + causes + } } ... 
on MLModelGroup { urn From 19ce0036c710e5d85eda1c2d5124e8dd638bb8ac Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Tue, 29 Aug 2023 13:37:27 -0400 Subject: [PATCH 10/41] build(ingest): Pin mypy-boto3-sagemaker directly (#8746) --- metadata-ingestion/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 9195dab7bf5b7..f0b66f8bbfb96 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -421,6 +421,7 @@ def get_long_description(): # The boto3-stubs package seems to have regularly breaking minor releases, # we pin to a specific version to avoid this. "boto3-stubs[s3,glue,sagemaker,sts]==1.28.15", + "mypy-boto3-sagemaker==1.28.15", # For some reason, above pin only restricts `mypy-boto3-sagemaker<1.29.0,>=1.28.0` "types-tabulate", # avrogen package requires this "types-pytz", From 40d17f00eafa3c952b2a87a857c1b381de6f7796 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Tue, 29 Aug 2023 14:33:40 -0400 Subject: [PATCH 11/41] feat(ingest/datahub): Improvements, bug fixes, and docs (#8735) --- .../docs/sources/datahub/README.md | 4 ++ .../docs/sources/datahub/datahub_pre.md | 66 +++++++++++++++++ .../docs/sources/datahub/datahub_recipe.yml | 30 ++++++++ .../ingestion/source/datahub/config.py | 41 ++++++----- ...l_reader.py => datahub_database_reader.py} | 45 +++++++----- .../source/datahub/datahub_kafka_reader.py | 9 ++- .../source/datahub/datahub_source.py | 70 +++++++++++++------ .../ingestion/source/datahub/report.py | 9 ++- .../datahub/ingestion/source/datahub/state.py | 10 +-- .../datahub/testing/compare_metadata_json.py | 1 - .../state/test_checkpoint.py | 4 +- 11 files changed, 221 insertions(+), 68 deletions(-) create mode 100644 metadata-ingestion/docs/sources/datahub/README.md create mode 100644 metadata-ingestion/docs/sources/datahub/datahub_pre.md create mode 100644 metadata-ingestion/docs/sources/datahub/datahub_recipe.yml rename metadata-ingestion/src/datahub/ingestion/source/datahub/{datahub_mysql_reader.py => datahub_database_reader.py} (67%) diff --git a/metadata-ingestion/docs/sources/datahub/README.md b/metadata-ingestion/docs/sources/datahub/README.md new file mode 100644 index 0000000000000..45afc6e166889 --- /dev/null +++ b/metadata-ingestion/docs/sources/datahub/README.md @@ -0,0 +1,4 @@ +Migrate data from one DataHub instance to another. + +Requires direct access to the database, kafka broker, and kafka schema registry +of the source DataHub instance. diff --git a/metadata-ingestion/docs/sources/datahub/datahub_pre.md b/metadata-ingestion/docs/sources/datahub/datahub_pre.md new file mode 100644 index 0000000000000..c98cce7047836 --- /dev/null +++ b/metadata-ingestion/docs/sources/datahub/datahub_pre.md @@ -0,0 +1,66 @@ +### Overview + +This source pulls data from two locations: +- The DataHub database, containing a single table holding all versioned aspects +- The DataHub Kafka cluster, reading from the [MCL Log](../../../../docs/what/mxe.md#metadata-change-log-mcl) +topic for timeseries aspects. + +All data is first read from the database, before timeseries data is ingested from kafka. +To prevent this source from potentially running forever, it will not ingest data produced after the +datahub_source ingestion job is started. This `stop_time` is reflected in the report. + +Data from the database and kafka are read in chronological order, specifically by the +createdon timestamp in the database and by kafka offset per partition. 
In order to
+properly read from the database, please ensure that the `createdon` column is indexed.
+Newly created databases should have this index, named `timeIndex`, by default, but on older
+ones you may have to create it yourself, with the statement:
+
+```
+CREATE INDEX timeIndex ON metadata_aspect_v2 (createdon);
+```
+
+*If you do not have this index, the source may run incredibly slowly and produce
+significant database load.*
+
+#### Stateful Ingestion
+On first run, the source will read from the earliest data in the database and the earliest
+kafka offsets. Every `commit_state_interval` (default 1000) records, the source will store
+a checkpoint to remember its place, i.e. the last createdon timestamp and kafka offsets.
+This allows you to stop and restart the source without losing much progress, but note that
+you will re-ingest some data at the start of the new run.
+
+If any errors are encountered in the ingestion process, e.g. we are unable to emit an aspect
+due to network errors, the source will keep running, but will stop committing checkpoints,
+unless `commit_with_parse_errors` (default `false`) is set. Thus, if you re-run the ingestion,
+you can re-ingest the data that was missed, but note that it will also re-ingest all subsequent data.
+
+If you want to re-ingest all data, you can set a different `pipeline_name` in your recipe,
+or set `stateful_ingestion.ignore_old_state`:
+
+```yaml
+source:
+  config:
+    # ... connection config, etc.
+    stateful_ingestion:
+      enabled: true
+      ignore_old_state: true
+```
+
+#### Limitations
+- Can only pull timeseries aspects still retained by Kafka; the default retention is 90 days.
+- Does not detect hard timeseries deletions, e.g. those made via a `datahub delete` command using the CLI.
+Therefore, if you deleted data in this way, it will still exist in the destination instance.
+- If you have a significant number of aspects with the exact same `createdon` timestamp,
+stateful ingestion will not be able to save checkpoints partially through that timestamp.
+On a subsequent run, all aspects for that timestamp will be ingested.
+
+#### Performance
+On your destination DataHub instance, we suggest the following settings:
+- Enable [async ingestion](../../../../docs/deploy/environment-vars.md#ingestion)
+- Use standalone consumers
+([mae-consumer](../../../../metadata-jobs/mae-consumer-job/README.md)
+and [mce-consumer](../../../../metadata-jobs/mce-consumer-job/README.md))
+  * If you are migrating large amounts of data, consider scaling consumer replicas.
+- Increase the number of gms pods to add redundancy and increase resilience to node evictions
+  * If you are migrating large amounts of data, consider increasing Elasticsearch's
+  thread count via the `ELASTICSEARCH_THREAD_COUNT` environment variable.
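To make the checkpointing pattern described in the doc above concrete, here is a minimal illustrative sketch. It is not the connector's actual code: `fetch_rows` is a hypothetical stand-in for the database reader, and a real run persists the checkpoint to DataHub rather than returning it.

```python
from datetime import datetime, timedelta
from typing import Iterator, Tuple

COMMIT_STATE_INTERVAL = 1000  # mirrors the commit_state_interval option above


def fetch_rows(since: datetime) -> Iterator[Tuple[str, datetime]]:
    """Hypothetical stand-in for the database reader: yields (urn, createdon)
    pairs in ascending createdon order, starting at the given checkpoint."""
    for i in range(2500):
        yield f"urn:li:corpuser:user_{i}", since + timedelta(seconds=i)


def ingest(checkpoint: datetime) -> datetime:
    """Emit records and periodically remember the last createdon seen, so a
    restarted run can resume from roughly where this one left off."""
    committed = checkpoint
    for i, (urn, createdon) in enumerate(fetch_rows(since=checkpoint), start=1):
        # ... emit the aspect for `urn` to the destination instance here ...
        if i % COMMIT_STATE_INTERVAL == 0:
            committed = createdon  # a real run persists this checkpoint
    return committed


new_checkpoint = ingest(datetime(2023, 8, 1))
# Restarting from `new_checkpoint` re-reads rows at or after that timestamp,
# which is why a restarted run re-ingests some data near the checkpoint.
print(f"next run resumes from {new_checkpoint}")
```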
diff --git a/metadata-ingestion/docs/sources/datahub/datahub_recipe.yml b/metadata-ingestion/docs/sources/datahub/datahub_recipe.yml new file mode 100644 index 0000000000000..cb7fc97a39b9f --- /dev/null +++ b/metadata-ingestion/docs/sources/datahub/datahub_recipe.yml @@ -0,0 +1,30 @@ +pipeline_name: datahub_source_1 +datahub_api: + server: "http://localhost:8080" # Migrate data from DataHub instance on localhost:8080 + token: "" +source: + type: datahub + config: + include_all_versions: false + database_connection: + scheme: "mysql+pymysql" # or "postgresql+psycopg2" for Postgres + host_port: ":" + username: "" + password: "" + database: "" + kafka_connection: + bootstrap: ":9092" + schema_registry_url: ":8081" + stateful_ingestion: + enabled: true + ignore_old_state: false + extractor_config: + set_system_metadata: false # Replicate system metadata + +# Here, we write to a DataHub instance +# You can also use a different sink, e.g. to write the data to a file instead +sink: + type: datahub + config: + server: "" + token: "" diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py index a054067d92334..053d136305527 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/config.py @@ -1,27 +1,27 @@ from typing import Optional -from pydantic import Field +from pydantic import Field, root_validator from datahub.configuration.kafka import KafkaConsumerConnectionConfig -from datahub.ingestion.source.sql.mysql import MySQLConnectionConfig +from datahub.ingestion.source.sql.sql_config import SQLAlchemyConnectionConfig from datahub.ingestion.source.state.stateful_ingestion_base import ( StatefulIngestionConfig, StatefulIngestionConfigBase, ) -DEFAULT_MYSQL_TABLE_NAME = "metadata_aspect_v2" +DEFAULT_DATABASE_TABLE_NAME = "metadata_aspect_v2" DEFAULT_KAFKA_TOPIC_NAME = "MetadataChangeLog_Timeseries_v1" -DEFAULT_MYSQL_BATCH_SIZE = 10_000 +DEFAULT_DATABASE_BATCH_SIZE = 10_000 class DataHubSourceConfig(StatefulIngestionConfigBase): - mysql_connection: MySQLConnectionConfig = Field( - default=MySQLConnectionConfig(), - description="MySQL connection config", + database_connection: Optional[SQLAlchemyConnectionConfig] = Field( + default=None, + description="Database connection config", ) - kafka_connection: KafkaConsumerConnectionConfig = Field( - default=KafkaConsumerConnectionConfig(), + kafka_connection: Optional[KafkaConsumerConnectionConfig] = Field( + default=None, description="Kafka connection config", ) @@ -29,18 +29,18 @@ class DataHubSourceConfig(StatefulIngestionConfigBase): default=False, description=( "If enabled, include all versions of each aspect. " - "Otherwise, only include the latest version of each aspect." + "Otherwise, only include the latest version of each aspect. 
" ), ) - mysql_batch_size: int = Field( - default=DEFAULT_MYSQL_BATCH_SIZE, - description="Number of records to fetch from MySQL at a time", + database_query_batch_size: int = Field( + default=DEFAULT_DATABASE_BATCH_SIZE, + description="Number of records to fetch from the database at a time", ) - mysql_table_name: str = Field( - default=DEFAULT_MYSQL_TABLE_NAME, - description="Name of MySQL table containing all versioned aspects", + database_table_name: str = Field( + default=DEFAULT_DATABASE_TABLE_NAME, + description="Name of database table containing all versioned aspects", ) kafka_topic_name: str = Field( @@ -66,3 +66,12 @@ class DataHubSourceConfig(StatefulIngestionConfigBase): "Enable if you want to ignore the errors." ), ) + + @root_validator + def check_ingesting_data(cls, values): + if not values.get("database_connection") and not values.get("kafka_connection"): + raise ValueError( + "Your current config will not ingest any data." + " Please specify at least one of `database_connection` or `kafka_connection`, ideally both." + ) + return values diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_mysql_reader.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py similarity index 67% rename from metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_mysql_reader.py rename to metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py index adf4c1db57395..39702ba3ce347 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_mysql_reader.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_database_reader.py @@ -10,33 +10,42 @@ from datahub.emitter.serialization_helper import post_json_transform from datahub.ingestion.source.datahub.config import DataHubSourceConfig from datahub.ingestion.source.datahub.report import DataHubSourceReport +from datahub.ingestion.source.sql.sql_config import SQLAlchemyConnectionConfig from datahub.metadata.schema_classes import ChangeTypeClass, SystemMetadataClass from datahub.utilities.lossy_collections import LossyDict, LossyList logger = logging.getLogger(__name__) -MYSQL_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" +# Should work for at least mysql, mariadb, postgres +DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" -class DataHubMySQLReader: - def __init__(self, config: DataHubSourceConfig, report: DataHubSourceReport): +class DataHubDatabaseReader: + def __init__( + self, + config: DataHubSourceConfig, + connection_config: SQLAlchemyConnectionConfig, + report: DataHubSourceReport, + ): self.config = config self.report = report self.engine = create_engine( - url=config.mysql_connection.get_sql_alchemy_url(), - **config.mysql_connection.options, + url=connection_config.get_sql_alchemy_url(), + **connection_config.options, ) @property def query(self) -> str: # May repeat rows for the same date - # Offset is generally 0, unless we repeat the same date twice + # Offset is generally 0, unless we repeat the same createdon twice return f""" SELECT urn, aspect, metadata, systemmetadata, createdon - FROM `{self.config.mysql_table_name}` + FROM `{self.config.database_table_name}` WHERE createdon >= %(since_createdon)s {"" if self.config.include_all_versions else "AND version = 0"} - ORDER BY createdon, urn, aspect, version # Ensures stable ordering + ORDER BY createdon, urn, aspect, # Ensure stable order, chronological per (urn, aspect) + CASE WHEN version = 0 THEN 1 ELSE 0 END, version + # Version 0 last, only when createdon is the same. 
Otherwise relies on createdon order LIMIT %(limit)s OFFSET %(offset)s """ @@ -48,11 +57,11 @@ def get_aspects( ts = from_createdon offset = 0 while ts.timestamp() <= stop_time.timestamp(): - logger.debug(f"Polling MySQL aspects from {ts}") + logger.debug(f"Polling database aspects from {ts}") rows = conn.execute( self.query, - since_createdon=ts.strftime(MYSQL_DATETIME_FORMAT), - limit=self.config.mysql_batch_size, + since_createdon=ts.strftime(DATETIME_FORMAT), + limit=self.config.database_query_batch_size, offset=offset, ) if not rows.rowcount: @@ -64,7 +73,7 @@ def get_aspects( row_dict = row._asdict() else: row_dict = dict(row) - mcp = self._parse_mysql_row(row_dict) + mcp = self._parse_row(row_dict) if mcp: yield mcp, row_dict["createdon"] @@ -72,15 +81,13 @@ def get_aspects( offset += i else: ts = row_dict["createdon"] - print(ts) offset = 0 - def _parse_mysql_row(self, d: Dict) -> Optional[MetadataChangeProposalWrapper]: + def _parse_row(self, d: Dict) -> Optional[MetadataChangeProposalWrapper]: try: json_aspect = post_json_transform(json.loads(d["metadata"])) json_metadata = post_json_transform(json.loads(d["systemmetadata"] or "{}")) system_metadata = SystemMetadataClass.from_obj(json_metadata) - system_metadata.lastObserved = int(d["createdon"].timestamp() * 1000) return MetadataChangeProposalWrapper( entityUrn=d["urn"], aspect=ASPECT_MAP[d["aspect"]].from_obj(json_aspect), @@ -91,8 +98,8 @@ def _parse_mysql_row(self, d: Dict) -> Optional[MetadataChangeProposalWrapper]: logger.warning( f"Failed to parse metadata for {d['urn']}: {e}", exc_info=True ) - self.report.num_mysql_parse_errors += 1 - self.report.mysql_parse_errors.setdefault(str(e), LossyDict()).setdefault( - d["aspect"], LossyList() - ).append(d["urn"]) + self.report.num_database_parse_errors += 1 + self.report.database_parse_errors.setdefault( + str(e), LossyDict() + ).setdefault(d["aspect"], LossyList()).append(d["urn"]) return None diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_kafka_reader.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_kafka_reader.py index b165d70dd53b0..d9e53e87c2cea 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_kafka_reader.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_kafka_reader.py @@ -11,6 +11,7 @@ from confluent_kafka.schema_registry import SchemaRegistryClient from confluent_kafka.schema_registry.avro import AvroDeserializer +from datahub.configuration.kafka import KafkaConsumerConnectionConfig from datahub.ingestion.api.closeable import Closeable from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.source.datahub.config import DataHubSourceConfig @@ -27,10 +28,12 @@ class DataHubKafkaReader(Closeable): def __init__( self, config: DataHubSourceConfig, + connection_config: KafkaConsumerConnectionConfig, report: DataHubSourceReport, ctx: PipelineContext, ): self.config = config + self.connection_config = connection_config self.report = report self.group_id = f"{KAFKA_GROUP_PREFIX}-{ctx.pipeline_name}" @@ -38,13 +41,13 @@ def __enter__(self) -> "DataHubKafkaReader": self.consumer = DeserializingConsumer( { "group.id": self.group_id, - "bootstrap.servers": self.config.kafka_connection.bootstrap, - **self.config.kafka_connection.consumer_config, + "bootstrap.servers": self.connection_config.bootstrap, + **self.connection_config.consumer_config, "auto.offset.reset": "earliest", "enable.auto.commit": False, "value.deserializer": AvroDeserializer( 
schema_registry_client=SchemaRegistryClient( - {"url": self.config.kafka_connection.schema_registry_url} + {"url": self.connection_config.schema_registry_url} ), return_record_name=True, ), diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_source.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_source.py index 636e65a244dad..2368febe1ff57 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/datahub_source.py @@ -1,5 +1,6 @@ import logging from datetime import datetime, timezone +from functools import partial from typing import Dict, Iterable, List, Optional from datahub.emitter.mcp import MetadataChangeProposalWrapper @@ -11,10 +12,13 @@ support_status, ) from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceReport +from datahub.ingestion.api.source_helpers import auto_workunit_reporter from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.datahub.config import DataHubSourceConfig +from datahub.ingestion.source.datahub.datahub_database_reader import ( + DataHubDatabaseReader, +) from datahub.ingestion.source.datahub.datahub_kafka_reader import DataHubKafkaReader -from datahub.ingestion.source.datahub.datahub_mysql_reader import DataHubMySQLReader from datahub.ingestion.source.datahub.report import DataHubSourceReport from datahub.ingestion.source.datahub.state import StatefulDataHubIngestionHandler from datahub.ingestion.source.state.stateful_ingestion_base import ( @@ -46,30 +50,50 @@ def get_report(self) -> SourceReport: return self.report def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: - return [] # Exactly replicate data from DataHub source + # Exactly replicate data from DataHub source + return [partial(auto_workunit_reporter, self.get_report())] def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: - stop_time = datetime.now(tz=timezone.utc) - logger.info(f"Ingesting DataHub metadata up until roughly {stop_time}") + self.report.stop_time = datetime.now(tz=timezone.utc) + logger.info(f"Ingesting DataHub metadata up until {self.report.stop_time}") state = self.stateful_ingestion_handler.get_last_run_state() - yield from self._get_mysql_workunits(state.mysql_createdon_datetime, stop_time) - self._commit_progress() - yield from self._get_kafka_workunits(state.kafka_offsets, stop_time) - self._commit_progress() - def _get_mysql_workunits( - self, from_createdon: datetime, stop_time: datetime + if self.config.database_connection is not None: + yield from self._get_database_workunits( + from_createdon=state.database_createdon_datetime + ) + self._commit_progress() + else: + logger.info( + "Skipping ingestion of versioned aspects as no database_connection provided" + ) + + if self.config.kafka_connection is not None: + yield from self._get_kafka_workunits(from_offsets=state.kafka_offsets) + self._commit_progress() + else: + logger.info( + "Skipping ingestion of timeseries aspects as no kafka_connection provided" + ) + + def _get_database_workunits( + self, from_createdon: datetime ) -> Iterable[MetadataWorkUnit]: - logger.info(f"Fetching MySQL aspects from {from_createdon}") - reader = DataHubMySQLReader(self.config, self.report) - mcps = reader.get_aspects(from_createdon, stop_time) + if self.config.database_connection is None: + return + + logger.info(f"Fetching database aspects starting from {from_createdon}") + reader = DataHubDatabaseReader( + 
self.config, self.config.database_connection, self.report + ) + mcps = reader.get_aspects(from_createdon, self.report.stop_time) for i, (mcp, createdon) in enumerate(mcps): yield mcp.as_workunit() - self.report.num_mysql_aspects_ingested += 1 + self.report.num_database_aspects_ingested += 1 if ( self.config.commit_with_parse_errors - or not self.report.num_mysql_parse_errors + or not self.report.num_database_parse_errors ): self.stateful_ingestion_handler.update_checkpoint( last_createdon=createdon @@ -77,12 +101,18 @@ def _get_mysql_workunits( self._commit_progress(i) def _get_kafka_workunits( - self, from_offsets: Dict[int, int], stop_time: datetime + self, from_offsets: Dict[int, int] ) -> Iterable[MetadataWorkUnit]: - logger.info(f"Fetching timeseries aspects from kafka until {stop_time}") - - with DataHubKafkaReader(self.config, self.report, self.ctx) as reader: - mcls = reader.get_mcls(from_offsets=from_offsets, stop_time=stop_time) + if self.config.kafka_connection is None: + return + + logger.info("Fetching timeseries aspects from kafka") + with DataHubKafkaReader( + self.config, self.config.kafka_connection, self.report, self.ctx + ) as reader: + mcls = reader.get_mcls( + from_offsets=from_offsets, stop_time=self.report.stop_time + ) for i, (mcl, offset) in enumerate(mcls): mcp = MetadataChangeProposalWrapper.try_from_mcl(mcl) if mcp.changeType == ChangeTypeClass.DELETE: diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/report.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/report.py index 3aa93d6a4577b..73e5a798a1553 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/report.py @@ -1,4 +1,5 @@ from dataclasses import dataclass, field +from datetime import datetime, timezone from datahub.ingestion.source.state.stateful_ingestion_base import ( StatefulIngestionReport, @@ -8,10 +9,12 @@ @dataclass class DataHubSourceReport(StatefulIngestionReport): - num_mysql_aspects_ingested: int = 0 - num_mysql_parse_errors: int = 0 + stop_time: datetime = field(default_factory=lambda: datetime.now(tz=timezone.utc)) + + num_database_aspects_ingested: int = 0 + num_database_parse_errors: int = 0 # error -> aspect -> [urn] - mysql_parse_errors: LossyDict[str, LossyDict[str, LossyList[str]]] = field( + database_parse_errors: LossyDict[str, LossyDict[str, LossyList[str]]] = field( default_factory=LossyDict ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/datahub/state.py b/metadata-ingestion/src/datahub/ingestion/source/datahub/state.py index deea9772fae20..4bedd331a9aea 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/datahub/state.py +++ b/metadata-ingestion/src/datahub/ingestion/source/datahub/state.py @@ -16,14 +16,16 @@ class DataHubIngestionState(CheckpointStateBase): - mysql_createdon_ts: NonNegativeInt = 0 + database_createdon_ts: NonNegativeInt = 0 # Maps partition -> offset kafka_offsets: Dict[int, NonNegativeInt] = Field(default_factory=dict) @property - def mysql_createdon_datetime(self) -> datetime: - return datetime.fromtimestamp(self.mysql_createdon_ts / 1000, tz=timezone.utc) + def database_createdon_datetime(self) -> datetime: + return datetime.fromtimestamp( + self.database_createdon_ts / 1000, tz=timezone.utc + ) class PartitionOffset(NamedTuple): @@ -81,7 +83,7 @@ def update_checkpoint( if cur_checkpoint: cur_state = cast(DataHubIngestionState, cur_checkpoint.state) if last_createdon: - cur_state.mysql_createdon_ts = 
int(last_createdon.timestamp() * 1000) + cur_state.database_createdon_ts = int(last_createdon.timestamp() * 1000) if last_offset: cur_state.kafka_offsets[last_offset.partition] = last_offset.offset + 1 diff --git a/metadata-ingestion/src/datahub/testing/compare_metadata_json.py b/metadata-ingestion/src/datahub/testing/compare_metadata_json.py index b017afc8c1448..5c52e1ab4f0b3 100644 --- a/metadata-ingestion/src/datahub/testing/compare_metadata_json.py +++ b/metadata-ingestion/src/datahub/testing/compare_metadata_json.py @@ -55,7 +55,6 @@ def assert_metadata_files_equal( output = load_json_file(output_path) if update_golden and not golden_exists: - golden = load_json_file(output_path) shutil.copyfile(str(output_path), str(golden_path)) return else: diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py index 532ab69d1c6b1..712ae2066b728 100644 --- a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py +++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import Dict, List import pydantic @@ -29,7 +29,7 @@ def _assert_checkpoint_deserialization( ) -> Checkpoint: # Serialize a checkpoint aspect with the previous state. checkpoint_aspect = DatahubIngestionCheckpointClass( - timestampMillis=int(datetime.now().timestamp() * 1000), + timestampMillis=int(datetime.now(tz=timezone.utc).timestamp() * 1000), pipelineName=test_pipeline_name, platformInstanceId="this-can-be-anything-and-will-be-ignored", config="this-is-also-ignored", From df0113c71322f93418ba29d3fc4ec869ba3ff3ab Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 29 Aug 2023 13:08:03 -0700 Subject: [PATCH 12/41] docs(obseve): Adding Volume Assertion Guide (#8706) --- docs-website/sidebars.js | 5 +- .../observe/freshness-assertions.md | 8 +- .../observe/volume-assertions.md | 350 ++++++++++++++++++ 3 files changed, 358 insertions(+), 5 deletions(-) create mode 100644 docs/managed-datahub/observe/volume-assertions.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 51a57fc41dd36..c3a3bfc3a611c 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -418,7 +418,10 @@ module.exports = { }, "docs/act-on-metadata/impact-analysis", { - Observability: ["docs/managed-datahub/observe/freshness-assertions"], + Observability: [ + "docs/managed-datahub/observe/freshness-assertions", + "docs/managed-datahub/observe/volume-assertions", + ], }, ], }, diff --git a/docs/managed-datahub/observe/freshness-assertions.md b/docs/managed-datahub/observe/freshness-assertions.md index 54b3134151d3a..d10d1f18eba5c 100644 --- a/docs/managed-datahub/observe/freshness-assertions.md +++ b/docs/managed-datahub/observe/freshness-assertions.md @@ -59,7 +59,7 @@ Tables. For example, imagine that we work for a company with a Snowflake Table that stores user clicks collected from our e-commerce website. This table is updated with new data on a specific cadence: once per hour (In practice, daily or even weekly are also common). In turn, there is a downstream Business Analytics Dashboard in Looker that shows important metrics like -the number of people clicking our "Daily Sale" banners, and this dashboard pulls is generated from data stored in our "clicks" table. 
+the number of people clicking our "Daily Sale" banners, and this dashboard is generated from data stored in our "clicks" table.
 
 It is important that our clicks Table continues to be updated each hour because if it stops being updated, it could mean
 that our downstream metrics dashboard becomes incorrect. And the risk of this situation is obvious: our organization
 may make bad decisions based on incomplete information.
@@ -291,10 +291,10 @@ To create a Freshness Assertion Entity that checks whether a table has been upda
 mutation createFreshnessAssertion {
   createFreshnessAssertion(
     input: {
-      entityUrn: ""
-      type: DATASET_CHANGE
+      entityUrn: "",
+      type: DATASET_CHANGE,
       schedule: {
-        type: FIXED_INTERVAL
+        type: FIXED_INTERVAL,
         fixedInterval: { unit: HOUR, multiple: 8 }
       }
     }
diff --git a/docs/managed-datahub/observe/volume-assertions.md b/docs/managed-datahub/observe/volume-assertions.md
new file mode 100644
index 0000000000000..8c7676ca9afbb
--- /dev/null
+++ b/docs/managed-datahub/observe/volume-assertions.md
@@ -0,0 +1,350 @@
+---
+description: This page provides an overview of working with DataHub Volume Assertions
+---
+import FeatureAvailability from '@site/src/components/FeatureAvailability';
+
+
+# Volume Assertions
+
+<FeatureAvailability saasOnly />
+
+> ⚠️ The **Volume Assertions** feature is currently in private beta, part of the **Acryl Observe** module, and may only be available to a
+> limited set of design partners.
+>
+> If you are interested in trying it and providing feedback, please reach out to your Acryl Customer Success
+> representative.
+
+## Introduction
+
+Can you remember a time when the meaning of a Data Warehouse Table that you depended on fundamentally changed, with little or no notice?
+If the answer is yes, how did you find out? We'll take a guess - someone looking at an internal reporting dashboard or, worse, a user of your product sounded an alarm when
+a number looked a bit out of the ordinary. Perhaps your table initially tracked purchases made on your company's e-commerce web store, but suddenly began to include purchases made
+through your company's new mobile app.
+
+There are many reasons why an important Table on Snowflake, Redshift, or BigQuery may change in its meaning - application code bugs, new feature rollouts,
+changes to key metric definitions, etc. Oftentimes, these changes break important assumptions made about the data used in building key downstream data products
+like reporting dashboards or data-driven product features.
+
+What if you could reduce the time to detect these incidents, so that the people responsible for the data are made aware of data
+issues _before_ anyone else? With Acryl DataHub **Volume Assertions**, you can.
+
+Acryl DataHub allows users to define expectations about the normal volume, or size, of a particular warehouse Table,
+and then monitor those expectations over time as the table grows and changes.
+
+In this article, we'll cover the basics of monitoring Volume Assertions - what they are, how to configure them, and more - so that you and your team can
+start building trust in your most important data assets.
+
+Let's get started!
+
+## Support
+
+Volume Assertions are currently supported for:
+
+1. Snowflake
+2. Redshift
+3. BigQuery
+
+Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion**
+tab.
+
+> Note that Volume Assertions are not yet supported if you are connecting to your warehouse
+> using the DataHub CLI or a Remote Ingestion Executor.
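To preview the mechanics described in the sections below, here is a toy sketch of the comparison a change-volume check performs. It is purely illustrative, not DataHub code; the function name and the 1000-row threshold are invented for the example.

```python
from typing import Optional


def volume_change_ok(
    previous_count: Optional[int],
    current_count: int,
    max_absolute_delta: int = 1000,
) -> bool:
    """Pass when the table's growth or decline since the previous check
    stays within an allowed absolute band."""
    if previous_count is None:
        return True  # first observation: nothing to compare against yet
    return abs(current_count - previous_count) <= max_absolute_delta


# The "clicks" table gained 12,000 rows since the last check -> fail
assert volume_change_ok(previous_count=50_000, current_count=62_000) is False
# Normal hourly growth -> pass
assert volume_change_ok(previous_count=50_000, current_count=50_800) is True
```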
+
+## What is a Volume Assertion?
+
+A **Volume Assertion** is a configurable Data Quality rule used to monitor a Data Warehouse Table
+for unexpected or sudden changes in "volume", or row count. Volume Assertions can be particularly useful when you have frequently-changing
+Tables which have a relatively stable pattern of growth or decline.
+
+For example, imagine that we work for a company with a Snowflake Table that stores user clicks collected from our e-commerce website.
+This table is updated with new data on a specific cadence: once per hour (in practice, daily or even weekly cadences are also common).
+In turn, there is a downstream Business Analytics Dashboard in Looker that shows important metrics like
+the number of people clicking our "Daily Sale" banners, and this dashboard is generated from data stored in our "clicks" table.
+It is important that our clicks Table is updated with the correct number of rows each hour; otherwise, our downstream
+metrics dashboard may become incorrect. The risk of this situation is obvious: our organization
+may make bad decisions based on incomplete information.
+
+In such cases, we can use a **Volume Assertion** that checks whether the Snowflake "clicks" Table is growing in an expected
+way, and that there are no sudden increases or decreases in the rows being added to or removed from the table.
+If too many rows are added or removed within an hour, we can notify key stakeholders and begin to root cause the issue before it impacts consumers of the data.
+
+### Anatomy of a Volume Assertion
+
+At the most basic level, **Volume Assertions** consist of a few important parts:
+
+1. An **Evaluation Schedule**
+2. A **Volume Condition**
+3. A **Volume Source**
+
+In this section, we'll give an overview of each.
+
+#### 1. Evaluation Schedule
+
+The **Evaluation Schedule**: This defines how often to check a given warehouse Table for its volume. This should usually
+be configured to match the expected change frequency of the Table, although it can also be less frequent depending
+on the requirements. You can also specify particular days of the week, hours in the day, or even
+minutes in an hour.
+
+
+#### 2. Volume Condition
+
+The **Volume Condition**: This defines the type of condition that we'd like to monitor, or when the Assertion
+should result in failure.
+
+There are 2 different categories of conditions: **Total** Volume and **Change** Volume.
+
+_Total_ volume conditions are those which are defined against the point-in-time total row count for a table. They allow you to specify conditions like:
+
+1. **Table has too many rows**: The table should always have less than 1000 rows.
+2. **Table has too few rows**: The table should always have more than 1000 rows.
+3. **Table row count is outside a range**: The table should always have between 1000 and 2000 rows.
+
+_Change_ volume conditions are those which are defined against the growth or decline rate of a table, measured between subsequent checks
+of the table volume. They allow you to specify conditions like:
+
+1. **Table growth is too fast**: When the table volume is checked, it should have < 1000 more rows than it had during the previous check.
+
+
+#### 3. Volume Source
+
+The **Volume Source**: This is the mechanism that Acryl DataHub should use to determine the table volume (row count). The supported
+source types vary by the platform, but generally fall into these categories (sketched in the example after this list):
+
+- **Information Schema**: A system Table that is exposed by the Data Warehouse which contains live information about the Databases
+  and Tables stored inside the Data Warehouse, including their row count. It is usually efficient to check, but can in some cases be slightly delayed to update
+  once a change has been made to a table.
+
+- **Query**: A `COUNT(*)` query is used to retrieve the latest row count for a table, with optional SQL filters applied (depending on platform).
+  This can be less efficient to check depending on the size of the table. Because this approach does not rely on
+  system warehouse tables, it is also easily portable across Data Warehouse and Data Lake providers.
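+For intuition, the two source types roughly correspond to the queries sketched below. This is a hedged example with
+made-up database, schema, and table names; the exact statements Acryl DataHub issues vary by platform:
+
+```python
+# Sketch only: the real SQL differs per platform; every identifier below is made up.
+
+# Information Schema: read the warehouse's own metadata about the table.
+# Usually cheap to check, but the reported row count can slightly lag recent changes.
+INFORMATION_SCHEMA_SQL = """
+SELECT row_count
+FROM my_db.information_schema.tables
+WHERE table_schema = 'MY_SCHEMA' AND table_name = 'CLICKS'
+"""
+
+# Query: count the rows directly, with an optional SQL filter.
+# Always up to date, but the cost grows with the size of the table.
+COUNT_QUERY_SQL = """
+SELECT COUNT(*)
+FROM my_db.my_schema.clicks
+WHERE event_date >= CURRENT_DATE - 7  -- optional filter (support depends on platform)
+"""
+```
+
+This is the trade-off to keep in mind when customizing the source: metadata lookups are cheap but may lag slightly, while
+direct counts are exact but become more expensive as the table grows.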
+
+Volume Assertions also have an off switch: they can be started or stopped at any time with the click of a button.
+
+
+## Creating a Volume Assertion
+
+### Prerequisites
+
+1. **Permissions**: To create or delete Volume Assertions for a specific entity on DataHub, you'll need to be granted the
+   `Edit Assertions` and `Edit Monitors` privileges for the entity. This is granted to Entity owners by default.
+
+2. **Data Platform Connection**: In order to create a Volume Assertion, you'll need to have an **Ingestion Source** configured to your
+   Data Platform: Snowflake, BigQuery, or Redshift under the **Integrations** tab.
+
+Once these are in place, you're ready to create your Volume Assertions!
+
+### Steps
+
+1. Navigate to the Table that you want to monitor for volume
+2. Click the **Validations** tab
+ +3. Click **+ Create Assertion** + +
+ +4. Choose **Volume** + +5. Configure the evaluation **schedule**. This is the frequency at which the assertion will be evaluated to produce a pass or fail result, and the times + when the table volume will be checked. + +6. Configure the evaluation **condition type**. This determines the cases in which the new assertion will fail when it is evaluated. + +
+ +7. (Optional) Click **Advanced** to customize the volume **source**. This is the mechanism that will be used to obtain the table + row count metric. Each Data Platform supports different options including Information Schema and Query. + +
+ +- **Information Schema**: Check the Data Platform system metadata tables to determine the table row count. +- **Query**: Issue a `COUNT(*)` query to the table to determine the row count. + +8. Click **Next** +9. Configure actions that should be taken when the Volume Assertion passes or fails + +
+
+- **Raise incident**: Automatically raise a new DataHub `Volume` Incident for the Table whenever the Volume Assertion is failing. This
+  may indicate that the Table is unfit for consumption. Configure Slack Notifications under **Settings** to be notified when
+  an incident is created due to an Assertion failure.
+- **Resolve incident**: Automatically resolve any incidents that were raised due to failures in this Volume Assertion. Note that
+  any other incidents will not be impacted.
+
+10. Click **Save**.
+
+And that's it! DataHub will now begin to monitor your Volume Assertion for the table.
+
+To view the time of the next Volume Assertion evaluation, simply click **Volume** and then click on your
+new Assertion.
+
+
+Once your assertion has run, you will begin to see a Success or Failure status for the Table.
+
+ + +## Stopping a Volume Assertion + +In order to temporarily stop the evaluation of a Volume Assertion: + +1. Navigate to the **Validations** tab of the Table with the assertion +2. Click **Volume** to open the Volume Assertions list +3. Click the three-dot menu on the right side of the assertion you want to disable +4. Click **Stop** + +
+ +To resume the Volume Assertion, simply click **Turn On**. + +
+
+
+## Smart Assertions ⚡
+
+As part of the **Acryl Observe** module, Acryl DataHub also provides **Smart Assertions** out of the box. These are
+dynamic, AI-powered Volume Assertions that you can use to monitor the volume of important warehouse Tables, without
+requiring any manual setup.
+
+If Acryl DataHub is able to detect a pattern in the volume of a Snowflake, Redshift, or BigQuery Table, you'll find
+a recommended Smart Assertion under the `Validations` tab on the Table profile page.
+
+
+
+In order to enable it, simply click **Turn On**. From this point forward, the Smart Assertion will check for changes on a cadence
+based on the Table history.
+
+Don't need it anymore? Smart Assertions can just as easily be turned off by clicking the three-dot "more" button and then **Stop**.
+
+
+## Creating Volume Assertions via API
+
+Under the hood, Acryl DataHub implements Volume Assertion Monitoring using two "entity" concepts:
+
+- **Assertion**: The specific expectation for volume, e.g. "The table should always contain between 10 and 20 rows"
+  or "The table should grow by fewer than 1000 rows each day". This is the "what".
+
+- **Monitor**: The process responsible for evaluating the Assertion on a given evaluation schedule and using specific
+  mechanisms. This is the "how".
+
+Note that to create or delete Assertions and Monitors for a specific entity on DataHub, you'll need the
+`Edit Assertions` and `Edit Monitors` privileges for it.
+
+#### GraphQL
+
+In order to create a Volume Assertion that is monitored on a specific **Evaluation Schedule**, you'll need to use 2
+GraphQL mutations: one to create the Volume Assertion entity, and one to create the Assertion Monitor entity responsible for evaluating it.
+
+Start by creating the Volume Assertion entity using the `createVolumeAssertion` mutation and hang on to the 'urn' field of the Assertion entity
+you get back. Then continue by creating a Monitor entity using `createAssertionMonitor`.
+
+##### Examples
+
+To create a Volume Assertion Entity that checks whether a table's total row count falls within an expected range:
+
+```json
+mutation createVolumeAssertion {
+  createVolumeAssertion(
+    input: {
+      entityUrn: "",
+      type: ROW_COUNT_TOTAL,
+      rowCountTotal: {
+        operator: BETWEEN,
+        parameters: {
+          minValue: {
+            value: 10,
+            type: NUMBER
+          },
+          maxValue: {
+            value: 20,
+            type: NUMBER
+          }
+        }
+      }
+    }
+  ) {
+    urn
+  }
+}
+```
+
+This creates an assertion specifying that the row count total should always fall between 10 and 20.
+
+The supported volume assertion types are `ROW_COUNT_TOTAL` and `ROW_COUNT_CHANGE`. Other (e.g. incrementing segment) types are not yet supported.
+The supported operator types are `GREATER_THAN`, `GREATER_THAN_OR_EQUAL_TO`, `LESS_THAN`, `LESS_THAN_OR_EQUAL_TO`, and `BETWEEN` (requires minValue, maxValue).
+The only supported parameter type is `NUMBER`.
+
+To create an Assertion Monitor Entity that evaluates the volume assertion every 8 hours using the Information Schema:
+
+```json
+mutation createAssertionMonitor {
+  createAssertionMonitor(
+    input: {
+      entityUrn: "",
+      assertionUrn: "",
+      schedule: {
+        cron: "0 */8 * * *",
+        timezone: "America/Los_Angeles"
+      },
+      parameters: {
+        type: DATASET_VOLUME,
+        datasetVolumeParameters: {
+          sourceType: INFORMATION_SCHEMA
+        }
+      }
+    }
+  ) {
+    urn
+  }
+}
+```
+
+This entity defines _when_ to run the check (using cron format - every 8th hour) and _how_ to run the check (using the Information Schema).
+
+After creating the monitor, the new assertion will start to be evaluated every 8 hours in your selected timezone.
+
+You can delete assertions along with their monitors using the GraphQL mutations `deleteAssertion` and `deleteMonitor`.
+
+### Tips
+
+:::info
+**Authorization**
+
+Remember to always provide a DataHub Personal Access Token when calling the GraphQL API.
To do so, just add the 'Authorization' header as follows: + +``` +Authorization: Bearer +``` + +**Exploring GraphQL API** + +Also, remember that you can play with an interactive version of the Acryl GraphQL API at `https://your-account-id.acryl.io/api/graphiql` +::: From 277690331590cdcb09789feea94bacfccba9ccc6 Mon Sep 17 00:00:00 2001 From: skrydal Date: Tue, 29 Aug 2023 22:25:35 +0200 Subject: [PATCH 13/41] fix(ingest/okta): Removed code closing okta's event_loop (#8675) Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/identity/okta.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py b/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py index 5805790fe8bb7..5e8413bbb6f30 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py +++ b/metadata-ingestion/src/datahub/ingestion/source/identity/okta.py @@ -303,11 +303,13 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: # This method can be called on the main thread or an async thread, so we must create a new loop if one doesn't exist # See https://docs.python.org/3/library/asyncio-eventloop.html for more info. + created_event_loop = False try: event_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() except RuntimeError: event_loop = asyncio.new_event_loop() asyncio.set_event_loop(event_loop) + created_event_loop = True # Step 1: Produce MetadataWorkUnits for CorpGroups. okta_groups: Optional[Iterable[Group]] = None @@ -408,7 +410,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: ).as_workunit() # Step 4: Close the event loop - event_loop.close() + if created_event_loop: + event_loop.close() def get_report(self): return self.report From 44c9ba47dd0b9a485ce7882c2e21239bcce27722 Mon Sep 17 00:00:00 2001 From: Joshua Eilers Date: Tue, 29 Aug 2023 15:39:35 -0700 Subject: [PATCH 14/41] fix(highlight): disable full name highlight (#8750) --- datahub-web-react/src/app/entity/group/preview/Preview.tsx | 4 +--- datahub-web-react/src/app/entity/user/preview/Preview.tsx | 4 +--- datahub-web-react/src/app/preview/DefaultPreviewCard.tsx | 2 +- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/datahub-web-react/src/app/entity/group/preview/Preview.tsx b/datahub-web-react/src/app/entity/group/preview/Preview.tsx index 67449b9a481f0..5b9a25e198cfe 100644 --- a/datahub-web-react/src/app/entity/group/preview/Preview.tsx +++ b/datahub-web-react/src/app/entity/group/preview/Preview.tsx @@ -88,9 +88,7 @@ export const Preview = ({ {entityRegistry.getEntityName(EntityType.CorpGroup)} - - {name ? : urn} - + {name ? : urn} {membersCount} members diff --git a/datahub-web-react/src/app/entity/user/preview/Preview.tsx b/datahub-web-react/src/app/entity/user/preview/Preview.tsx index 8893d4ab86786..05baefb295b98 100644 --- a/datahub-web-react/src/app/entity/user/preview/Preview.tsx +++ b/datahub-web-react/src/app/entity/user/preview/Preview.tsx @@ -81,9 +81,7 @@ export const Preview = ({ {entityRegistry.getEntityName(EntityType.CorpUser)} - - {name ? : urn} - + {name ? 
: urn} diff --git a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx index 0d0a32f7750a8..319c8ed0a3e1d 100644 --- a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx +++ b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx @@ -292,7 +292,7 @@ export default function DefaultPreviewCard({ ) : ( - + )} From 2eb93b9ba5c397130d9ad9180db48acf766264fa Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 29 Aug 2023 16:47:34 -0700 Subject: [PATCH 15/41] fix(ui): hide pages from web crawlers (#8738) --- datahub-web-react/public/robots.txt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/public/robots.txt b/datahub-web-react/public/robots.txt index e9e57dc4d41b9..7a00656bc3073 100644 --- a/datahub-web-react/public/robots.txt +++ b/datahub-web-react/public/robots.txt @@ -1,3 +1,6 @@ # https://www.robotstxt.org/robotstxt.html User-agent: * -Disallow: +Disallow: /api +Disallow: /gms +Disallow: /search +Disallow: /logOut From 97a77d8d0ba92d9dc4e190bc71cb73b47beedca5 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 29 Aug 2023 16:48:54 -0700 Subject: [PATCH 16/41] docs: add index pages for feature/deployment guides (#8723) --- docs-website/docusaurus.config.js | 4 ++-- docs-website/sidebars.js | 19 +++++++++++++++++-- docs/how/search.md | 8 -------- 3 files changed, 19 insertions(+), 12 deletions(-) diff --git a/docs-website/docusaurus.config.js b/docs-website/docusaurus.config.js index df69e8513fbfc..9bdba5f317542 100644 --- a/docs-website/docusaurus.config.js +++ b/docs-website/docusaurus.config.js @@ -178,8 +178,8 @@ module.exports = { appId: "RK0UG797F3", apiKey: "39d7eb90d8b31d464e309375a52d674f", indexName: "datahubproject", - // contextualSearch: true, - // searchParameters: {}, + insights: true, + contextualSearch: true, // debug: true, }, }, diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index c3a3bfc3a611c..64433a2615f34 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -138,7 +138,15 @@ module.exports = { ], }, { - Deployment: [ + type: "category", + label: "Deployment", + link: { + type: "generated-index", + title: "Deployment Guides", + description: + "Learn how to deploy DataHub to your environment, set up authentication, manage upgrades, and more.", + }, + items: [ // The purpose of this section is to provide the minimum steps required to deploy DataHub to the vendor of your choosing "docs/deploy/aws", "docs/deploy/gcp", @@ -395,7 +403,14 @@ module.exports = { ], }, { - Features: [ + type: "category", + label: "Features", + link: { + type: "generated-index", + title: "Feature Guides", + description: "Learn about the features of DataHub.", + }, + items: [ "docs/ui-ingestion", "docs/how/search", "docs/schema-history", diff --git a/docs/how/search.md b/docs/how/search.md index bf1c8e8632e24..6a5e85e547fc5 100644 --- a/docs/how/search.md +++ b/docs/how/search.md @@ -2,14 +2,6 @@ import FeatureAvailability from '@site/src/components/FeatureAvailability'; # About DataHub Search - - - - The **search bar** is an important mechanism for discovering data assets in DataHub. From the search bar, you can find Datasets, Columns, Dashboards, Charts, Data Pipelines, and more. Simply type in a term and press 'enter'. 
From bebee881006124cd725668314e40719246f051e1 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Wed, 30 Aug 2023 09:27:38 +0900 Subject: [PATCH 17/41] feat(docs): move versioned_sidebars to static-assets (#8743) --- .gitignore | 1 + docs-website/download_historical_versions.py | 50 +- .../version-0.10.5-sidebars.json | 594 ------------------ 3 files changed, 35 insertions(+), 610 deletions(-) delete mode 100644 docs-website/versioned_sidebars/version-0.10.5-sidebars.json diff --git a/.gitignore b/.gitignore index b6edbccf71125..49ab5c475096c 100644 --- a/.gitignore +++ b/.gitignore @@ -70,6 +70,7 @@ metadata-ingestion/generated/** # docs docs/generated/ docs-website/versioned_docs/ +docs-website/versioned_sidebars/ tmp* temp/** diff --git a/docs-website/download_historical_versions.py b/docs-website/download_historical_versions.py index a005445cb1497..83157edc1972c 100644 --- a/docs-website/download_historical_versions.py +++ b/docs-website/download_historical_versions.py @@ -1,7 +1,7 @@ +import json import os import tarfile import urllib.request -import json repo_url = "https://api.github.com/repos/datahub-project/static-assets" @@ -16,30 +16,36 @@ def download_file(url, destination): f.write(chunk) -def fetch_tar_urls(repo_url, folder_path): +def fetch_urls(repo_url: str, folder_path: str, file_format: str): api_url = f"{repo_url}/contents/{folder_path}" response = urllib.request.urlopen(api_url) - data = response.read().decode('utf-8') - tar_urls = [ - file["download_url"] for file in json.loads(data) if file["name"].endswith(".tar.gz") + data = response.read().decode("utf-8") + urls = [ + file["download_url"] + for file in json.loads(data) + if file["name"].endswith(file_format) ] - print(tar_urls) - return tar_urls + print(urls) + return urls -def main(): - folder_path = "versioned_docs" - destination_dir = "versioned_docs" +def extract_tar_file(destination_path): + with tarfile.open(destination_path, "r:gz") as tar: + tar.extractall() + os.remove(destination_path) + + +def download_versioned_docs(folder_path: str, destination_dir: str, file_format: str): if not os.path.exists(destination_dir): os.makedirs(destination_dir) - tar_urls = fetch_tar_urls(repo_url, folder_path) + urls = fetch_urls(repo_url, folder_path, file_format) - for url in tar_urls: + for url in urls: filename = os.path.basename(url) destination_path = os.path.join(destination_dir, filename) - version = '.'.join(filename.split('.')[:3]) + version = ".".join(filename.split(".")[:3]) extracted_path = os.path.join(destination_dir, version) print("extracted_path", extracted_path) if os.path.exists(extracted_path): @@ -48,13 +54,25 @@ def main(): try: download_file(url, destination_path) print(f"Downloaded {filename} to {destination_dir}") - with tarfile.open(destination_path, "r:gz") as tar: - tar.extractall() - os.remove(destination_path) + if file_format == ".tar.gz": + extract_tar_file(destination_path) except urllib.error.URLError as e: print(f"Error while downloading {filename}: {e}") continue +def main(): + download_versioned_docs( + folder_path="versioned_docs", + destination_dir="versioned_docs", + file_format=".tar.gz", + ) + download_versioned_docs( + folder_path="versioned_sidebars", + destination_dir="versioned_sidebars", + file_format=".json", + ) + + if __name__ == "__main__": main() diff --git a/docs-website/versioned_sidebars/version-0.10.5-sidebars.json b/docs-website/versioned_sidebars/version-0.10.5-sidebars.json deleted file mode 100644 index 67179075fc994..0000000000000 --- 
a/docs-website/versioned_sidebars/version-0.10.5-sidebars.json +++ /dev/null @@ -1,594 +0,0 @@ -{ - "overviewSidebar": [ - { - "label": "Getting Started", - "type": "category", - "collapsed": true, - "items": [ - { - "type": "doc", - "label": "Introduction", - "id": "docs/features" - }, - { - "type": "doc", - "label": "Quickstart", - "id": "docs/quickstart" - }, - { - "type": "link", - "label": "Demo", - "href": "https://demo.datahubproject.io/" - }, - "docs/what-is-datahub/datahub-concepts", - "docs/saas" - ] - }, - { - "Integrations": [ - { - "type": "doc", - "label": "Introduction", - "id": "metadata-ingestion/README" - }, - { - "Quickstart Guides": [ - { - "BigQuery": [ - "docs/quick-ingestion-guides/bigquery/overview", - "docs/quick-ingestion-guides/bigquery/setup", - "docs/quick-ingestion-guides/bigquery/configuration" - ] - }, - { - "Redshift": [ - "docs/quick-ingestion-guides/redshift/overview", - "docs/quick-ingestion-guides/redshift/setup", - "docs/quick-ingestion-guides/redshift/configuration" - ] - }, - { - "Snowflake": [ - "docs/quick-ingestion-guides/snowflake/overview", - "docs/quick-ingestion-guides/snowflake/setup", - "docs/quick-ingestion-guides/snowflake/configuration" - ] - }, - { - "Tableau": [ - "docs/quick-ingestion-guides/tableau/overview", - "docs/quick-ingestion-guides/tableau/setup", - "docs/quick-ingestion-guides/tableau/configuration" - ] - }, - { - "PowerBI": [ - "docs/quick-ingestion-guides/powerbi/overview", - "docs/quick-ingestion-guides/powerbi/setup", - "docs/quick-ingestion-guides/powerbi/configuration" - ] - } - ] - }, - { - "Sources": [ - { - "type": "doc", - "id": "docs/lineage/airflow", - "label": "Airflow" - }, - "metadata-integration/java/spark-lineage/README", - "metadata-ingestion/integration_docs/great-expectations", - "metadata-integration/java/datahub-protobuf/README", - { - "type": "autogenerated", - "dirName": "docs/generated/ingestion/sources" - } - ] - }, - { - "Sinks": [ - { - "type": "autogenerated", - "dirName": "metadata-ingestion/sink_docs" - } - ] - }, - { - "Transformers": [ - "metadata-ingestion/docs/transformer/intro", - "metadata-ingestion/docs/transformer/dataset_transformer" - ] - }, - { - "Advanced Guides": [ - { - "Scheduling Ingestion": [ - "metadata-ingestion/schedule_docs/intro", - "metadata-ingestion/schedule_docs/cron", - "metadata-ingestion/schedule_docs/airflow", - "metadata-ingestion/schedule_docs/kubernetes" - ] - }, - "docs/platform-instances", - "metadata-ingestion/docs/dev_guides/stateful", - "metadata-ingestion/docs/dev_guides/classification", - "metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source", - "metadata-ingestion/docs/dev_guides/sql_profiles" - ] - } - ] - }, - { - "Deployment": [ - "docs/deploy/aws", - "docs/deploy/gcp", - "docker/README", - "docs/deploy/kubernetes", - "docs/deploy/environment-vars", - { - "Authentication": [ - "docs/authentication/README", - "docs/authentication/concepts", - "docs/authentication/changing-default-credentials", - "docs/authentication/guides/add-users", - { - "Frontend Authentication": [ - "docs/authentication/guides/jaas", - { - "OIDC Authentication": [ - "docs/authentication/guides/sso/configure-oidc-react", - "docs/authentication/guides/sso/configure-oidc-react-google", - "docs/authentication/guides/sso/configure-oidc-react-okta", - "docs/authentication/guides/sso/configure-oidc-react-azure" - ] - } - ] - }, - "docs/authentication/introducing-metadata-service-authentication", - "docs/authentication/personal-access-tokens" - ] - }, - { - "Authorization": 
[ - "docs/authorization/README", - "docs/authorization/roles", - "docs/authorization/policies", - "docs/authorization/groups" - ] - }, - { - "Advanced Guides": [ - "docs/how/delete-metadata", - "docs/how/configuring-authorization-with-apache-ranger", - "docs/how/backup-datahub", - "docs/how/restore-indices", - "docs/advanced/db-retention", - "docs/advanced/monitoring", - "docs/how/extract-container-logs", - "docs/deploy/telemetry", - "docs/how/kafka-config", - "docs/deploy/confluent-cloud", - "docs/advanced/no-code-upgrade", - "docs/how/jattach-guide" - ] - }, - "docs/how/updating-datahub" - ] - }, - { - "API": [ - "docs/api/datahub-apis", - { - "GraphQL API": [ - { - "label": "Overview", - "type": "doc", - "id": "docs/api/graphql/overview" - }, - { - "Reference": [ - { - "type": "doc", - "label": "Queries", - "id": "graphql/queries" - }, - { - "type": "doc", - "label": "Mutations", - "id": "graphql/mutations" - }, - { - "type": "doc", - "label": "Objects", - "id": "graphql/objects" - }, - { - "type": "doc", - "label": "Inputs", - "id": "graphql/inputObjects" - }, - { - "type": "doc", - "label": "Interfaces", - "id": "graphql/interfaces" - }, - { - "type": "doc", - "label": "Unions", - "id": "graphql/unions" - }, - { - "type": "doc", - "label": "Enums", - "id": "graphql/enums" - }, - { - "type": "doc", - "label": "Scalars", - "id": "graphql/scalars" - } - ] - }, - { - "Guides": [ - { - "type": "doc", - "label": "How To Set Up GraphQL", - "id": "docs/api/graphql/how-to-set-up-graphql" - }, - { - "type": "doc", - "label": "Getting Started With GraphQL", - "id": "docs/api/graphql/getting-started" - }, - { - "type": "doc", - "label": "Access Token Management", - "id": "docs/api/graphql/token-management" - } - ] - } - ] - }, - { - "type": "doc", - "label": "OpenAPI", - "id": "docs/api/openapi/openapi-usage-guide" - }, - "docs/dev-guides/timeline", - { - "Rest.li API": [ - { - "type": "doc", - "label": "Rest.li API Guide", - "id": "docs/api/restli/restli-overview" - }, - { - "type": "doc", - "label": "Restore Indices", - "id": "docs/api/restli/restore-indices" - }, - { - "type": "doc", - "label": "Get Index Sizes", - "id": "docs/api/restli/get-index-sizes" - }, - { - "type": "doc", - "label": "Truncate Timeseries Aspect", - "id": "docs/api/restli/truncate-time-series-aspect" - }, - { - "type": "doc", - "label": "Get ElasticSearch Task Status Endpoint", - "id": "docs/api/restli/get-elastic-task-status" - }, - { - "type": "doc", - "label": "Evaluate Tests", - "id": "docs/api/restli/evaluate-tests" - }, - { - "type": "doc", - "label": "Aspect Versioning and Rest.li Modeling", - "id": "docs/advanced/aspect-versioning" - } - ] - }, - { - "Python SDK": [ - "metadata-ingestion/as-a-library", - { - "Python SDK Reference": [ - { - "type": "autogenerated", - "dirName": "python-sdk" - } - ] - } - ] - }, - "metadata-integration/java/as-a-library", - { - "API and SDK Guides": [ - "docs/advanced/patch", - "docs/api/tutorials/datasets", - "docs/api/tutorials/lineage", - "docs/api/tutorials/tags", - "docs/api/tutorials/terms", - "docs/api/tutorials/owners", - "docs/api/tutorials/domains", - "docs/api/tutorials/deprecation", - "docs/api/tutorials/descriptions", - "docs/api/tutorials/custom-properties", - "docs/api/tutorials/ml" - ] - }, - { - "type": "category", - "label": "DataHub CLI", - "link": { - "type": "doc", - "id": "docs/cli" - }, - "items": [ - "docs/datahub_lite" - ] - }, - { - "type": "category", - "label": "Datahub Actions", - "link": { - "type": "doc", - "id": "docs/act-on-metadata" - }, - "items": 
[ - "docs/actions/README", - "docs/actions/quickstart", - "docs/actions/concepts", - { - "Sources": [ - { - "type": "autogenerated", - "dirName": "docs/actions/sources" - } - ] - }, - { - "Events": [ - { - "type": "autogenerated", - "dirName": "docs/actions/events" - } - ] - }, - { - "Actions": [ - { - "type": "autogenerated", - "dirName": "docs/actions/actions" - } - ] - }, - { - "Guides": [ - { - "type": "autogenerated", - "dirName": "docs/actions/guides" - } - ] - } - ] - } - ] - }, - { - "Features": [ - "docs/ui-ingestion", - "docs/how/search", - "docs/schema-history", - "docs/domains", - "docs/dataproducts", - "docs/glossary/business-glossary", - "docs/tags", - "docs/ownership/ownership-types", - "docs/browse", - "docs/authorization/access-policies-guide", - "docs/features/dataset-usage-and-query-history", - "docs/posts", - "docs/sync-status", - "docs/lineage/lineage-feature-guide", - { - "type": "doc", - "id": "docs/tests/metadata-tests", - "className": "saasOnly" - }, - "docs/act-on-metadata/impact-analysis", - { - "Observability": [ - "docs/managed-datahub/observe/freshness-assertions" - ] - } - ] - }, - { - "Develop": [ - { - "DataHub Metadata Model": [ - "docs/modeling/metadata-model", - "docs/modeling/extending-the-metadata-model", - "docs/what/mxe", - { - "Entities": [ - { - "type": "autogenerated", - "dirName": "docs/generated/metamodel/entities" - } - ] - } - ] - }, - { - "Architecture": [ - "docs/architecture/architecture", - "docs/components", - "docs/architecture/metadata-ingestion", - "docs/architecture/metadata-serving", - "docs/architecture/docker-containers" - ] - }, - { - "Developing on DataHub": [ - "docs/developers", - "docs/docker/development", - "metadata-ingestion/developing", - "docs/api/graphql/graphql-endpoint-development", - { - "Modules": [ - "datahub-web-react/README", - "datahub-frontend/README", - "datahub-graphql-core/README", - "metadata-service/README", - "metadata-jobs/mae-consumer-job/README", - "metadata-jobs/mce-consumer-job/README" - ] - } - ] - }, - "docs/plugins", - { - "Troubleshooting": [ - "docs/troubleshooting/quickstart", - "docs/troubleshooting/build", - "docs/troubleshooting/general" - ] - }, - { - "Advanced": [ - "metadata-ingestion/docs/dev_guides/reporting_telemetry", - "docs/advanced/mcp-mcl", - "docker/datahub-upgrade/README", - "docs/advanced/no-code-modeling", - "datahub-web-react/src/app/analytics/README", - "docs/how/migrating-graph-service-implementation", - "docs/advanced/field-path-spec-v2", - "metadata-ingestion/adding-source", - "docs/how/add-custom-ingestion-source", - "docs/how/add-custom-data-platform", - "docs/advanced/browse-paths-upgrade", - "docs/browseV2/browse-paths-v2" - ] - } - ] - }, - { - "Community": [ - "docs/slack", - "docs/townhalls", - "docs/townhall-history", - "docs/CODE_OF_CONDUCT", - "docs/CONTRIBUTING", - "docs/links", - "docs/rfc" - ] - }, - { - "Managed DataHub": [ - "docs/managed-datahub/managed-datahub-overview", - "docs/managed-datahub/welcome-acryl", - { - "type": "doc", - "id": "docs/managed-datahub/saas-slack-setup", - "className": "saasOnly" - }, - { - "type": "doc", - "id": "docs/managed-datahub/approval-workflows", - "className": "saasOnly" - }, - { - "Metadata Ingestion With Acryl": [ - "docs/managed-datahub/metadata-ingestion-with-acryl/ingestion" - ] - }, - { - "DataHub API": [ - { - "type": "doc", - "id": "docs/managed-datahub/datahub-api/entity-events-api", - "className": "saasOnly" - }, - { - "GraphQL API": [ - "docs/managed-datahub/datahub-api/graphql-api/getting-started", - { - 
"type": "doc", - "id": "docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta", - "className": "saasOnly" - } - ] - } - ] - }, - { - "Integrations": [ - { - "type": "doc", - "id": "docs/managed-datahub/integrations/aws-privatelink", - "className": "saasOnly" - }, - { - "type": "doc", - "id": "docs/managed-datahub/integrations/oidc-sso-integration", - "className": "saasOnly" - } - ] - }, - { - "Operator Guide": [ - { - "type": "doc", - "id": "docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws", - "className": "saasOnly" - }, - { - "type": "doc", - "id": "docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge", - "className": "saasOnly" - } - ] - }, - { - "type": "doc", - "id": "docs/managed-datahub/chrome-extension", - "className": "saasOnly" - }, - { - "Managed DataHub Release History": [ - "docs/managed-datahub/release-notes/v_0_2_10", - "docs/managed-datahub/release-notes/v_0_2_9", - "docs/managed-datahub/release-notes/v_0_2_8", - "docs/managed-datahub/release-notes/v_0_2_7", - "docs/managed-datahub/release-notes/v_0_2_6", - "docs/managed-datahub/release-notes/v_0_2_5", - "docs/managed-datahub/release-notes/v_0_2_4", - "docs/managed-datahub/release-notes/v_0_2_3", - "docs/managed-datahub/release-notes/v_0_2_2", - "docs/managed-datahub/release-notes/v_0_2_1", - "docs/managed-datahub/release-notes/v_0_2_0", - "docs/managed-datahub/release-notes/v_0_1_73", - "docs/managed-datahub/release-notes/v_0_1_72", - "docs/managed-datahub/release-notes/v_0_1_70", - "docs/managed-datahub/release-notes/v_0_1_69" - ] - } - ] - }, - { - "Release History": [ - "releases" - ] - } - ] -} From dee1bc854cb93b2c9daf8a70d40ae05239560e72 Mon Sep 17 00:00:00 2001 From: Zachary McNellis Date: Wed, 30 Aug 2023 11:08:06 -0700 Subject: [PATCH 18/41] docs(observe): DataHub Operation freshness assertion guide (#8749) Co-authored-by: John Joyce --- .../observe/freshness-assertions.md | 41 +++++++++++++++---- 1 file changed, 34 insertions(+), 7 deletions(-) diff --git a/docs/managed-datahub/observe/freshness-assertions.md b/docs/managed-datahub/observe/freshness-assertions.md index d10d1f18eba5c..c5d4ca9081b43 100644 --- a/docs/managed-datahub/observe/freshness-assertions.md +++ b/docs/managed-datahub/observe/freshness-assertions.md @@ -122,8 +122,12 @@ Change Source types vary by the platform, but generally fall into these categori is higher than the previously observed value, in order to determine whether the Table has been changed within a given period of time. Note that this approach is only supported if the Change Window does not use a fixed interval. - Using the final 2 approaches - column value queries - to determine whether a Table has changed useful because it can be customized to determine whether - specific types of important changes have been made to a given Table. + - **DataHub Operation**: A DataHub "Operation" aspect contains timeseries information used to describe changes made to an entity. Using this + option avoids contacting your data platform, and instead uses the DataHub Operation metadata to evaluate Freshness Assertions. + This relies on Operations being reported to DataHub, either via ingestion or via use of the DataHub APIs (see [Report Operation via API](#reporting-operations-via-api)). + Note if you have not configured an ingestion source through DataHub, then this may be the only option available. 
+
+  Using either of the column value approaches (**Last Modified Column** or **High Watermark Column**) to determine whether a Table has changed can be useful, because the check can be customized to detect specific types of important changes made to a given Table.
   Because it does not involve system warehouse tables, it is also easily portable across Data Warehouse and Data Lake providers.
 
 Freshness Assertions also have an off switch: they can be started or stopped at any time with the click of a button.
@@ -178,7 +182,7 @@ _Check whether the table has changed in a specific window of time_
 
 7. (Optional) Click **Advanced** to customize the evaluation **source**. This is the mechanism that will be used to evaluate
-the check. Each Data Platform supports different options including Audit Log, Information Schema, Last Modified Column, and High Watermark Column.
+the check. Each Data Platform supports different options including Audit Log, Information Schema, Last Modified Column, High Watermark Column, and DataHub Operation.
@@ -189,11 +193,12 @@ the check. Each Data Platform supports different options including Audit Log, In - **Last Modified Column**: Check for the presence of rows using a "Last Modified Time" column, which should reflect the time at which a given row was last changed in the table, to determine whether the table changed within the evaluation period. - **High Watermark Column**: Monitor changes to a continuously-increasing "high watermark" column value to determine whether a table - has been changed. This option is particularly useful for tables that grow consistently with time, for example fact or event (e.g. click-strea) tables. It is not available + has been changed. This option is particularly useful for tables that grow consistently with time, for example fact or event (e.g. click-stream) tables. It is not available when using a fixed lookback period. +- **DataHub Operation**: Use DataHub Operations to determine whether the table changed within the evaluation period. -8. Click **Next** -9. Configure actions that should be taken when the Freshness Assertion passes or fails +1. Click **Next** +2. Configure actions that should be taken when the Freshness Assertion passes or fails
@@ -280,7 +285,7 @@ Note that to create or delete Assertions and Monitors for a specific entity on D In order to create a Freshness Assertion that is being monitored on a specific **Evaluation Schedule**, you'll need to use 2 GraphQL mutation queries to create a Freshness Assertion entity and create an Assertion Monitor entity responsible for evaluating it. -Start by creating the Freshness Assertion entity using the `createFreshnessAssertion` query and hang on to the 'urn' field of the Assertion entit y +Start by creating the Freshness Assertion entity using the `createFreshnessAssertion` query and hang on to the 'urn' field of the Assertion entity you get back. Then continue by creating a Monitor entity using the `createAssertionMonitor`. ##### Examples @@ -337,6 +342,28 @@ After creating the monitor, the new assertion will start to be evaluated every 8 You can delete assertions along with their monitors using GraphQL mutations: `deleteAssertion` and `deleteMonitor`. +### Reporting Operations via API + +DataHub Operations can be used to capture changes made to entities. This is useful for cases where the underlying data platform does not provide a mechanism +to capture changes, or where the data platform's mechanism is not reliable. In order to report an operation, you can use the `reportOperation` GraphQL mutation. + + +##### Examples +```json +mutation reportOperation { + reportOperation( + input: { + urn: "", + operationType: INSERT, + sourceType: DATA_PLATFORM, + timestampMillis: 1693252366489 + } + ) +} +``` + +Use the `timestampMillis` field to specify the time at which the operation occurred. If no value is provided, the current time will be used. + ### Tips :::info From 5032af9123fa44272e0b61c908a51f30ffeec4e4 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 30 Aug 2023 12:07:41 -0700 Subject: [PATCH 19/41] feat(cli): support recursive deletes (#8709) --- docs/how/delete-metadata.md | 19 ++++++ .../src/datahub/cli/delete_cli.py | 63 +++++++++++++++++-- .../src/datahub/ingestion/graph/client.py | 59 ++++++++++++++++- 3 files changed, 134 insertions(+), 7 deletions(-) diff --git a/docs/how/delete-metadata.md b/docs/how/delete-metadata.md index acbb573020be0..f720a66ce5765 100644 --- a/docs/how/delete-metadata.md +++ b/docs/how/delete-metadata.md @@ -43,6 +43,9 @@ datahub delete --platform snowflake # Filters can be combined, which will select entities that match all filters. datahub delete --platform looker --entity-type chart datahub delete --platform bigquery --env PROD + +# You can also do recursive deletes for container and dataPlatformInstance entities. +datahub delete --urn "urn:li:container:f76..." --recursive ``` When performing hard deletes, you can optionally add the `--only-soft-deleted` flag to only hard delete entities that were previously soft deleted. @@ -122,6 +125,14 @@ datahub delete --urn "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted datahub delete --platform snowflake --env DEV ``` +#### Delete everything within a specific Snowflake DB + +```shell +# You can find your container urn by navigating to the relevant +# DB in the DataHub UI and clicking the "copy urn" button. 
+datahub delete --urn "urn:li:container:77644901c4f574845578ebd18b7c14fa" --recursive +``` + #### Delete all BigQuery datasets in the PROD environment ```shell @@ -129,6 +140,13 @@ datahub delete --platform snowflake --env DEV datahub delete --env PROD --entity-type dataset --platform bigquery ``` +#### Delete everything within a MySQL platform instance + +```shell +# The instance name comes from the `platform_instance` config option in the ingestion recipe. +datahub delete --urn 'urn:li:dataPlatformInstance:(urn:li:dataPlatform:mysql,my_instance_name)' --recursive +``` + #### Delete all pipelines and tasks from Airflow ```shell @@ -138,6 +156,7 @@ datahub delete --platform "airflow" #### Delete all containers for a particular platform ```shell +# Note: this will leave S3 datasets intact. datahub delete --entity-type container --platform s3 ``` diff --git a/metadata-ingestion/src/datahub/cli/delete_cli.py b/metadata-ingestion/src/datahub/cli/delete_cli.py index 0d3c35e933e25..7ab7605ef6363 100644 --- a/metadata-ingestion/src/datahub/cli/delete_cli.py +++ b/metadata-ingestion/src/datahub/cli/delete_cli.py @@ -37,6 +37,11 @@ "glossaryNode", } +_RECURSIVE_DELETE_TYPES = { + "container", + "dataPlatformInstance", +} + @click.group(cls=DefaultGroup, default="by-filter") def delete() -> None: @@ -252,6 +257,12 @@ def references(urn: str, dry_run: bool, force: bool) -> None: help="Entity type filter (e.g. dataset)", ) @click.option("--query", required=False, type=str, help="Elasticsearch query string") +@click.option( + "--recursive", + required=False, + is_flag=True, + help="Recursively delete all contained entities (only for containers and dataPlatformInstances)", +) @click.option( "--start-time", required=False, @@ -298,6 +309,7 @@ def by_filter( platform: Optional[str], entity_type: Optional[str], query: Optional[str], + recursive: bool, start_time: Optional[datetime], end_time: Optional[datetime], batch_size: int, @@ -308,7 +320,12 @@ def by_filter( # Validate the cli arguments. _validate_user_urn_and_filters( - urn=urn, entity_type=entity_type, platform=platform, env=env, query=query + urn=urn, + entity_type=entity_type, + platform=platform, + env=env, + query=query, + recursive=recursive, ) soft_delete_filter = _validate_user_soft_delete_flags( soft=soft, aspect=aspect, only_soft_deleted=only_soft_deleted @@ -327,11 +344,29 @@ def by_filter( logger.info(f"Using {graph}") # Determine which urns to delete. + delete_by_urn = bool(urn) and not recursive if urn: - delete_by_urn = True urns = [urn] + + if recursive: + # Add children urns to the list. + if guess_entity_type(urn) == "dataPlatformInstance": + urns.extend( + graph.get_urns_by_filter( + platform_instance=urn, + status=soft_delete_filter, + batch_size=batch_size, + ) + ) + else: + urns.extend( + graph.get_urns_by_filter( + container=urn, + status=soft_delete_filter, + batch_size=batch_size, + ) + ) else: - delete_by_urn = False urns = list( graph.get_urns_by_filter( entity_types=[entity_type] if entity_type else None, @@ -348,20 +383,22 @@ def by_filter( ) return + # Print out a summary of the urns to be deleted and confirm with the user. + if not delete_by_urn: urns_by_type: Dict[str, List[str]] = {} for urn in urns: entity_type = guess_entity_type(urn) urns_by_type.setdefault(entity_type, []).append(urn) if len(urns_by_type) > 1: # Display a breakdown of urns by entity type if there's multiple. 
- click.echo("Filter matched urns of multiple entity types") + click.echo("Found urns of multiple entity types") for entity_type, entity_urns in urns_by_type.items(): click.echo( f"- {len(entity_urns)} {entity_type} urn(s). Sample: {choices(entity_urns, k=min(5, len(entity_urns)))}" ) else: click.echo( - f"Filter matched {len(urns)} {entity_type} urn(s). Sample: {choices(urns, k=min(5, len(urns)))}" + f"Found {len(urns)} {entity_type} urn(s). Sample: {choices(urns, k=min(5, len(urns)))}" ) if not force and not dry_run: @@ -403,6 +440,7 @@ def _validate_user_urn_and_filters( platform: Optional[str], env: Optional[str], query: Optional[str], + recursive: bool, ) -> None: # Check urn / filters options. if urn: @@ -423,6 +461,21 @@ def _validate_user_urn_and_filters( f"Using --env without other filters will delete all metadata in the {env} environment. Please use with caution." ) + # Check recursive flag. + if recursive: + if not urn: + raise click.UsageError( + "The --recursive flag can only be used with a single urn." + ) + elif guess_entity_type(urn) not in _RECURSIVE_DELETE_TYPES: + raise click.UsageError( + f"The --recursive flag can only be used with these entity types: {_RECURSIVE_DELETE_TYPES}." + ) + elif urn and guess_entity_type(urn) in _RECURSIVE_DELETE_TYPES: + logger.warning( + f"This will only delete {urn}. Use --recursive to delete all contained entities." + ) + def _validate_user_soft_delete_flags( soft: bool, aspect: Optional[str], only_soft_deleted: bool diff --git a/metadata-ingestion/src/datahub/ingestion/graph/client.py b/metadata-ingestion/src/datahub/ingestion/graph/client.py index 50ea69b6c13a9..b371ab181e133 100644 --- a/metadata-ingestion/src/datahub/ingestion/graph/client.py +++ b/metadata-ingestion/src/datahub/ingestion/graph/client.py @@ -16,7 +16,12 @@ from datahub.cli.cli_utils import get_url_and_token from datahub.configuration.common import ConfigModel, GraphError, OperationalError from datahub.emitter.aspect import TIMESERIES_ASPECT_MAP -from datahub.emitter.mce_builder import DEFAULT_ENV, Aspect, make_data_platform_urn +from datahub.emitter.mce_builder import ( + DEFAULT_ENV, + Aspect, + make_data_platform_urn, + make_dataplatform_instance_urn, +) from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.emitter.rest_emitter import DatahubRestEmitter from datahub.emitter.serialization_helper import post_json_transform @@ -543,8 +548,10 @@ def get_urns_by_filter( *, entity_types: Optional[List[str]] = None, platform: Optional[str] = None, + platform_instance: Optional[str] = None, env: Optional[str] = None, query: Optional[str] = None, + container: Optional[str] = None, status: RemovedStatusFilter = RemovedStatusFilter.NOT_SOFT_DELETED, batch_size: int = 10000, extraFilters: Optional[List[SearchFilterRule]] = None, @@ -557,15 +564,25 @@ def get_urns_by_filter( :param entity_types: List of entity types to include. If None, all entity types will be returned. :param platform: Platform to filter on. If None, all platforms will be returned. + :param platform_instance: Platform instance to filter on. If None, all platform instances will be returned. :param env: Environment (e.g. PROD, DEV) to filter on. If None, all environments will be returned. + :param query: Query string to filter on. If None, all entities will be returned. + :param container: A container urn that entities must be within. + This works recursively, so it will include entities within sub-containers as well. + If None, all entities will be returned. 
+ Note that this requires browsePathV2 aspects (added in 0.10.4+). :param status: Filter on the deletion status of the entity. The default is only return non-soft-deleted entities. :param extraFilters: Additional filters to apply. If specified, the results will match all of the filters. + + :return: An iterable of urns that match the filters. """ types: Optional[List[str]] = None if entity_types is not None: if not entity_types: - raise ValueError("entity_types cannot be an empty list") + raise ValueError( + "entity_types cannot be an empty list; use None for all entities" + ) types = [_graphql_entity_type(entity_type) for entity_type in entity_types] @@ -584,6 +601,44 @@ def get_urns_by_filter( } ] + # Platform instance filter. + if platform_instance: + if platform: + # Massage the platform instance into a fully qualified urn, if necessary. + platform_instance = make_dataplatform_instance_urn( + platform, platform_instance + ) + + # Warn if platform_instance is not a fully qualified urn. + # TODO: Change this once we have a first-class data platform instance urn type. + if guess_entity_type(platform_instance) != "dataPlatformInstance": + raise ValueError( + f"Invalid data platform instance urn: {platform_instance}" + ) + + andFilters += [ + { + "field": "platformInstance", + "values": [platform_instance], + "condition": "EQUAL", + } + ] + + # Browse path v2 filter. + if container: + # Warn if container is not a fully qualified urn. + # TODO: Change this once we have a first-class container urn type. + if guess_entity_type(container) != "container": + raise ValueError(f"Invalid container urn: {container}") + + andFilters += [ + { + "field": "browsePathV2", + "values": [container], + "condition": "CONTAIN", + } + ] + # Status filter. if status == RemovedStatusFilter.NOT_SOFT_DELETED: # Subtle: in some cases (e.g. when the dataset doesn't have a status aspect), the From fa0c43c0313f6239f54879819ffc6c6dc04cbef5 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Wed, 30 Aug 2023 15:47:08 -0400 Subject: [PATCH 20/41] fix(ingest/bigquery): Handle null view_definition; remove view definition hash ids (#8747) --- .../ingestion/source/bigquery_v2/bigquery.py | 27 +++++++------------ .../source/snowflake/snowflake_v2.py | 3 ++- .../ingestion/source/sql/sql_generic.py | 2 +- 3 files changed, 12 insertions(+), 20 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py index 7690723837165..1107a54a1896b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py @@ -1,5 +1,4 @@ import atexit -import hashlib import logging import os import re @@ -146,10 +145,6 @@ def cleanup(config: BigQueryV2Config) -> None: os.unlink(config._credentials_path) -def _generate_sql_id(sql: str) -> str: - return hashlib.md5(sql.encode("utf-8")).hexdigest() - - @platform_name("BigQuery", doc_order=1) @config_class(BigQueryV2Config) @support_status(SupportStatus.CERTIFIED) @@ -286,10 +281,9 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): # Global store of table identifiers for lineage filtering self.table_refs: Set[str] = set() - # We do this so that the SQL is stored in a file-backed dict, but the sql IDs are stored in memory. 
- # Maps project -> view_ref -> sql ID (will be used when generating lineage) - self.view_definition_ids: Dict[str, Dict[str, str]] = defaultdict(dict) - # Maps sql ID -> actual sql + # Maps project -> view_ref, so we can find all views in a project + self.view_refs_by_project: Dict[str, Set[str]] = defaultdict(set) + # Maps view ref -> actual sql self.view_definitions: FileBackedDict[str] = FileBackedDict() self.sql_parser_schema_resolver = SchemaResolver( @@ -684,10 +678,8 @@ def generate_lineage(self, project_id: str) -> Iterable[MetadataWorkUnit]: ) if self.config.lineage_parse_view_ddl: - for view, view_definition_id in self.view_definition_ids[ - project_id - ].items(): - view_definition = self.view_definitions[view_definition_id] + for view in self.view_refs_by_project[project_id]: + view_definition = self.view_definitions[view] raw_view_lineage = sqlglot_lineage( view_definition, schema_resolver=self.sql_parser_schema_resolver, @@ -896,10 +888,9 @@ def _process_view( BigQueryTableRef(table_identifier).get_sanitized_table_ref() ) self.table_refs.add(table_ref) - if self.config.lineage_parse_view_ddl: - view_definition_id = _generate_sql_id(view.view_definition) - self.view_definition_ids[project_id][table_ref] = view_definition_id - self.view_definitions[view_definition_id] = view.view_definition + if self.config.lineage_parse_view_ddl and view.view_definition: + self.view_refs_by_project[project_id].add(table_ref) + self.view_definitions[table_ref] = view.view_definition view.column_count = len(columns) if not view.column_count: @@ -989,7 +980,7 @@ def gen_view_dataset_workunits( view_properties_aspect = ViewProperties( materialized=view.materialized, viewLanguage="SQL", - viewLogic=view_definition_string, + viewLogic=view_definition_string or "", ) yield MetadataChangeProposalWrapper( entityUrn=self.gen_dataset_urn( diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_v2.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_v2.py index 90b751c875add..e561ed0e2d146 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_v2.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_v2.py @@ -770,7 +770,8 @@ def _process_schema( if self.config.parse_view_ddl: for view in views: key = self.get_dataset_identifier(view.name, schema_name, db_name) - self.view_definitions[key] = view.view_definition + if view.view_definition: + self.view_definitions[key] = view.view_definition if self.config.include_technical_schema or self.config.parse_view_ddl: for view in views: diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic.py index aa0493a18ab58..345f5bd57b44c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic.py @@ -44,7 +44,7 @@ class BaseView: comment: Optional[str] created: Optional[datetime] last_altered: Optional[datetime] - view_definition: str + view_definition: Optional[str] size_in_bytes: Optional[int] = None rows_count: Optional[int] = None column_count: Optional[int] = None From 026f7abe9c61510c6414730e04169569674f0feb Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Wed, 30 Aug 2023 15:53:08 -0400 Subject: [PATCH 21/41] feat(ingest/usage): Make cumulative query character limit configurable (#8751) --- .../ingestion/source/bigquery_v2/usage.py | 10 ++++---- .../ingestion/source/redshift/usage.py | 1 + 
.../source/snowflake/snowflake_usage_v2.py | 3 +-- .../source/usage/clickhouse_usage.py | 1 + .../ingestion/source/usage/redshift_usage.py | 1 + .../source/usage/starburst_trino_usage.py | 1 + .../ingestion/source/usage/usage_common.py | 24 +++++++++++++------ .../tests/unit/test_usage_common.py | 15 ++++++------ 8 files changed, 33 insertions(+), 23 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py index fe7ab8c49c79a..e112db31c5c63 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py @@ -56,10 +56,7 @@ from datahub.ingestion.source.state.redundant_run_skip_handler import ( RedundantUsageRunSkipHandler, ) -from datahub.ingestion.source.usage.usage_common import ( - TOTAL_BUDGET_FOR_QUERY_LIST, - make_usage_workunit, -) +from datahub.ingestion.source.usage.usage_common import make_usage_workunit from datahub.ingestion.source_report.ingestion_stage import ( USAGE_EXTRACTION_INGESTION, USAGE_EXTRACTION_OPERATIONAL_STATS, @@ -101,7 +98,6 @@ READ_STATEMENT_TYPES: List[str] = ["SELECT"] STRING_ENCODING = "utf-8" -MAX_QUERY_LENGTH = TOTAL_BUDGET_FOR_QUERY_LIST @dataclass(frozen=True, order=True) @@ -601,6 +597,7 @@ def _generate_usage_workunits( resource_urn_builder=self.dataset_urn_builder, top_n_queries=self.config.usage.top_n_queries, format_sql_queries=self.config.usage.format_sql_queries, + queries_character_limit=self.config.usage.queries_character_limit, ) self.report.num_usage_workunits_emitted += 1 except Exception as e: @@ -662,7 +659,8 @@ def _store_usage_event( usage_state.column_accesses[str(uuid.uuid4())] = key, field_read return True elif event.query_event and event.query_event.job_name: - query = event.query_event.query[:MAX_QUERY_LENGTH] + max_query_length = self.config.usage.queries_character_limit + query = event.query_event.query[:max_query_length] query_hash = hashlib.md5(query.encode(STRING_ENCODING)).hexdigest() if usage_state.queries.get(query_hash, query) != query: key = str(uuid.uuid4()) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py index 953f0edd7c2bb..bbb1876102578 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py @@ -449,6 +449,7 @@ def _make_usage_stat(self, agg: AggregatedDataset) -> MetadataWorkUnit: self.config.top_n_queries, self.config.format_sql_queries, self.config.include_top_n_queries, + self.config.queries_character_limit, ) def report_status(self, step: str, status: bool) -> None: diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py index f79be7174dbd9..a64921ea01759 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_usage_v2.py @@ -25,7 +25,6 @@ from datahub.ingestion.source.state.redundant_run_skip_handler import ( RedundantUsageRunSkipHandler, ) -from datahub.ingestion.source.usage.usage_common import TOTAL_BUDGET_FOR_QUERY_LIST from datahub.ingestion.source_report.ingestion_stage import ( USAGE_EXTRACTION_OPERATIONAL_STATS, USAGE_EXTRACTION_USAGE_AGGREGATION, @@ -280,7 +279,7 @@ def 
build_usage_statistics_for_dataset(self, dataset_identifier, row): def _map_top_sql_queries(self, top_sql_queries: Dict) -> List[str]: budget_per_query: int = int( - TOTAL_BUDGET_FOR_QUERY_LIST / self.config.top_n_queries + self.config.queries_character_limit / self.config.top_n_queries ) return sorted( [ diff --git a/metadata-ingestion/src/datahub/ingestion/source/usage/clickhouse_usage.py b/metadata-ingestion/src/datahub/ingestion/source/usage/clickhouse_usage.py index ffa08752070dd..855958f0755e1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/usage/clickhouse_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/usage/clickhouse_usage.py @@ -248,6 +248,7 @@ def _make_usage_stat(self, agg: AggregatedDataset) -> MetadataWorkUnit: self.config.top_n_queries, self.config.format_sql_queries, self.config.include_top_n_queries, + self.config.queries_character_limit, ) def get_report(self) -> SourceReport: diff --git a/metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py b/metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py index ea817f40f6a2b..99a980b326e53 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/usage/redshift_usage.py @@ -392,6 +392,7 @@ def _make_usage_stat(self, agg: AggregatedDataset) -> MetadataWorkUnit: self.config.top_n_queries, self.config.format_sql_queries, self.config.include_top_n_queries, + self.config.queries_character_limit, ) def get_report(self) -> RedshiftUsageSourceReport: diff --git a/metadata-ingestion/src/datahub/ingestion/source/usage/starburst_trino_usage.py b/metadata-ingestion/src/datahub/ingestion/source/usage/starburst_trino_usage.py index 7dd66fd1e3d0c..9394a8bba5e0b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/usage/starburst_trino_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/usage/starburst_trino_usage.py @@ -282,6 +282,7 @@ def _make_usage_stat(self, agg: AggregatedDataset) -> MetadataWorkUnit: self.config.top_n_queries, self.config.format_sql_queries, self.config.include_top_n_queries, + self.config.queries_character_limit, ) def get_report(self) -> SourceReport: diff --git a/metadata-ingestion/src/datahub/ingestion/source/usage/usage_common.py b/metadata-ingestion/src/datahub/ingestion/source/usage/usage_common.py index 92f8223f34d14..4547f9f368198 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/usage/usage_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/usage/usage_common.py @@ -47,7 +47,7 @@ ResourceType = TypeVar("ResourceType") # The total number of characters allowed across all queries in a single workunit. 
-TOTAL_BUDGET_FOR_QUERY_LIST = 24000 +DEFAULT_QUERIES_CHARACTER_LIMIT = 24000 def default_user_urn_builder(email: str) -> str: @@ -65,8 +65,8 @@ def make_usage_workunit( resource_urn_builder: Callable[[ResourceType], str], top_n_queries: int, format_sql_queries: bool, + queries_character_limit: int, user_urn_builder: Optional[Callable[[str], str]] = None, - total_budget_for_query_list: int = TOTAL_BUDGET_FOR_QUERY_LIST, query_trimmer_string: str = " ...", ) -> MetadataWorkUnit: if user_urn_builder is None: @@ -74,7 +74,7 @@ def make_usage_workunit( top_sql_queries: Optional[List[str]] = None if query_freq is not None: - budget_per_query: int = int(total_budget_for_query_list / top_n_queries) + budget_per_query: int = int(queries_character_limit / top_n_queries) top_sql_queries = [ trim_query( format_sql_query(query, keyword_case="upper", reindent_aligned=True) @@ -154,8 +154,8 @@ def make_usage_workunit( top_n_queries: int, format_sql_queries: bool, include_top_n_queries: bool, + queries_character_limit: int, user_urn_builder: Optional[Callable[[str], str]] = None, - total_budget_for_query_list: int = TOTAL_BUDGET_FOR_QUERY_LIST, query_trimmer_string: str = " ...", ) -> MetadataWorkUnit: query_freq = ( @@ -173,12 +173,21 @@ def make_usage_workunit( user_urn_builder=user_urn_builder, top_n_queries=top_n_queries, format_sql_queries=format_sql_queries, - total_budget_for_query_list=total_budget_for_query_list, + queries_character_limit=queries_character_limit, query_trimmer_string=query_trimmer_string, ) class BaseUsageConfig(BaseTimeWindowConfig): + queries_character_limit: int = Field( + default=DEFAULT_QUERIES_CHARACTER_LIMIT, + description=( + "Total character limit for all queries in a single usage aspect." + " Queries will be truncated to length `queries_character_limit / top_n_queries`." + ), + hidden_from_docs=True, # Don't want to encourage people to break elasticsearch + ) + top_n_queries: pydantic.PositiveInt = Field( default=10, description="Number of top queries to save to each table." 
) @@ -203,10 +212,10 @@ class BaseUsageConfig(BaseTimeWindowConfig): ) @pydantic.validator("top_n_queries") - def ensure_top_n_queries_is_not_too_big(cls, v: int) -> int: + def ensure_top_n_queries_is_not_too_big(cls, v: int, values: dict) -> int: minimum_query_size = 20 - max_queries = int(TOTAL_BUDGET_FOR_QUERY_LIST / minimum_query_size) + max_queries = int(values["queries_character_limit"] / minimum_query_size) if v > max_queries: raise ValueError( f"top_n_queries is set to {v} but it can be maximum {max_queries}" @@ -259,6 +268,7 @@ def generate_workunits( include_top_n_queries=self.config.include_top_n_queries, resource_urn_builder=resource_urn_builder, user_urn_builder=user_urn_builder, + queries_character_limit=self.config.queries_character_limit, ) diff --git a/metadata-ingestion/tests/unit/test_usage_common.py b/metadata-ingestion/tests/unit/test_usage_common.py index 8c9c25593afa8..1e2b2b6999177 100644 --- a/metadata-ingestion/tests/unit/test_usage_common.py +++ b/metadata-ingestion/tests/unit/test_usage_common.py @@ -1,6 +1,5 @@ import time from datetime import datetime -from unittest import mock import pytest from freezegun import freeze_time @@ -12,6 +11,7 @@ from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.usage.usage_common import ( + DEFAULT_QUERIES_CHARACTER_LIMIT, BaseUsageConfig, GenericAggregatedDataset, convert_usage_aggregation_class, @@ -183,6 +183,7 @@ def test_make_usage_workunit(): top_n_queries=10, format_sql_queries=False, include_top_n_queries=True, + queries_character_limit=DEFAULT_QUERIES_CHARACTER_LIMIT, ) ts_timestamp = int(floored_ts.timestamp() * 1000) @@ -218,6 +219,7 @@ def test_query_formatting(): top_n_queries=10, format_sql_queries=True, include_top_n_queries=True, + queries_character_limit=DEFAULT_QUERIES_CHARACTER_LIMIT, ) ts_timestamp = int(floored_ts.timestamp() * 1000) assert ( @@ -234,7 +236,7 @@ def test_query_trimming(): test_email: str = "test_email@test.com" test_query: str = "select * from test where a > 10 and b > 20 order by a asc" top_n_queries: int = 10 - total_budget_for_query_list: int = 200 + queries_character_limit: int = 200 event_time = datetime(2020, 1, 1) floored_ts = get_time_bucket(event_time, BucketDuration.DAY) resource = "test_db.test_schema.test_table" @@ -251,7 +253,7 @@ def test_query_trimming(): top_n_queries=top_n_queries, format_sql_queries=False, include_top_n_queries=True, - total_budget_for_query_list=total_budget_for_query_list, + queries_character_limit=queries_character_limit, ) ts_timestamp = int(floored_ts.timestamp() * 1000) @@ -267,11 +269,7 @@ def test_query_trimming(): def test_top_n_queries_validator_fails(): with pytest.raises(ValidationError) as excinfo: - with mock.patch( - "datahub.ingestion.source.usage.usage_common.TOTAL_BUDGET_FOR_QUERY_LIST", - 20, - ): - BaseUsageConfig(top_n_queries=2) + BaseUsageConfig(top_n_queries=2, queries_character_limit=20) assert "top_n_queries is set to 2 but it can be maximum 1" in str(excinfo.value) @@ -294,6 +292,7 @@ def test_make_usage_workunit_include_top_n_queries(): top_n_queries=10, format_sql_queries=False, include_top_n_queries=False, + queries_character_limit=DEFAULT_QUERIES_CHARACTER_LIMIT, ) ts_timestamp = int(floored_ts.timestamp() * 1000) From c193b1dc7061bcdd382aa11210a33f999c25e7cf Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Wed, 30 Aug 2023 22:12:02 +0200 Subject: [PATCH 22/41] fix(ingest/athena): Fixing db container id (#8689) --- 
.../src/datahub/ingestion/source/sql/athena.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py index c95e20252e421..53501b9a536ee 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py @@ -192,15 +192,12 @@ def gen_schema_containers( database: str, extra_properties: Optional[Dict[str, Any]] = None, ) -> Iterable[MetadataWorkUnit]: - database_container_key = gen_database_key( - database, - platform=self.platform, - platform_instance=self.config.platform_instance, - env=self.config.env, + database_container_key = self.get_database_container_key( + db_name=database, schema=schema ) yield from gen_database_container( - database=database, + database=database_container_key.database, database_container_key=database_container_key, sub_types=[DatasetContainerSubTypes.DATABASE], domain_registry=self.domain_registry, From 1282e5bf93982899f5f17307937cc6902515b356 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 30 Aug 2023 13:19:28 -0700 Subject: [PATCH 23/41] feat(systemMetadata): add pipeline names to system metadata (#8684) --- .../ingestion/extractor/mce_extractor.py | 7 +++ .../com/linkedin/mxe/SystemMetadata.pdl | 5 ++ .../com.linkedin.entity.aspects.snapshot.json | 47 +++++++------- ...com.linkedin.entity.entities.snapshot.json | 63 ++++++++++--------- ...m.linkedin.entity.entitiesV2.snapshot.json | 5 ++ ...n.entity.entitiesVersionedV2.snapshot.json | 5 ++ .../com.linkedin.entity.runs.snapshot.json | 42 ++++++------- ...nkedin.operations.operations.snapshot.json | 42 ++++++------- ...m.linkedin.platform.platform.snapshot.json | 58 ++++++++--------- 9 files changed, 153 insertions(+), 121 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py b/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py index 62e880a2e5334..36450dda153d7 100644 --- a/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py +++ b/metadata-ingestion/src/datahub/ingestion/extractor/mce_extractor.py @@ -27,6 +27,9 @@ def _try_reformat_with_black(code: str) -> str: class WorkUnitRecordExtractorConfig(ConfigModel): set_system_metadata = True + set_system_metadata_pipeline_name = ( + False # false for now until the models are available in OSS + ) unpack_mces_into_mcps = False @@ -66,6 +69,10 @@ def get_records( workunit.metadata.systemMetadata = SystemMetadata( lastObserved=get_sys_time(), runId=self.ctx.run_id ) + if self.config.set_system_metadata_pipeline_name: + workunit.metadata.systemMetadata.pipelineName = ( + self.ctx.pipeline_name + ) if ( isinstance(workunit.metadata, MetadataChangeEvent) and len(workunit.metadata.proposedSnapshot.aspects) == 0 diff --git a/metadata-models/src/main/pegasus/com/linkedin/mxe/SystemMetadata.pdl b/metadata-models/src/main/pegasus/com/linkedin/mxe/SystemMetadata.pdl index b9cf7d58d434e..e0f355229c912 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/mxe/SystemMetadata.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/mxe/SystemMetadata.pdl @@ -14,6 +14,11 @@ record SystemMetadata { */ runId: optional string = "no-run-id-provided" + /** + * The ingestion pipeline id that produced the metadata. Populated in case of batch ingestion. 
+ */ + pipelineName: optional string + /** * The model registry name that was used to process this event */ diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index e3beef5ac4871..ee6318026e27d 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -72,6 +72,11 @@ "doc" : "The run id that produced the metadata. Populated in case of batch-ingestion.", "default" : "no-run-id-provided", "optional" : true + }, { + "name" : "pipelineName", + "type" : "string", + "doc" : "The ingestion pipeline id that produced the metadata. Populated in case of batch ingestion.", + "optional" : true }, { "name" : "registryName", "type" : "string", @@ -342,7 +347,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1281,7 +1286,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1408,7 +1413,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1468,7 +1473,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1870,7 +1875,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "qualifiedName", @@ -1881,7 +1886,7 @@ "addToFilters" : false, "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -2067,7 +2072,7 @@ "enableAutocomplete" : true, "fieldName" : "displayName", "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "id", @@ -2104,7 +2109,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "definition", @@ -2296,7 +2301,7 @@ "optional" : true, "Searchable" : { "boostScore" : 10.0, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2349,7 +2354,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2412,7 +2417,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2505,7 +2510,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } } ], @@ -2525,7 +2530,7 @@ "boostScore" : 2.0, "enableAutocomplete" : true, "fieldName" : "ldap", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2571,7 +2576,7 @@ "doc" : "Unique Identifier of the data flow", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : 
"TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "cluster", @@ -2608,7 +2613,7 @@ "doc" : "Unique Identifier of the data job", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3183,7 +3188,7 @@ "type" : "string", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3201,7 +3206,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3227,7 +3232,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3293,7 +3298,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -3860,7 +3865,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3879,7 +3884,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index 0c9b49649bf1e..d63a938bbce9d 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -95,7 +95,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1328,7 +1328,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1474,7 +1474,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1534,7 +1534,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1927,7 +1927,7 @@ "boostScore" : 10.0, "enableAutocomplete" : false, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" }, "validate" : { "strlen" : { @@ -1942,7 +1942,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "type", @@ -2117,7 +2117,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "qualifiedName", @@ -2128,7 +2128,7 @@ "addToFilters" : false, "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -2423,7 +2423,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } } ], @@ -2562,7 +2562,7 @@ "boostScore" : 
2.0, "enableAutocomplete" : true, "fieldName" : "ldap", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2593,7 +2593,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2656,7 +2656,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2717,7 +2717,7 @@ "optional" : true, "Searchable" : { "boostScore" : 10.0, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2878,7 +2878,7 @@ "doc" : "Unique Identifier of the data flow", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "cluster", @@ -2941,7 +2941,7 @@ "doc" : "Unique Identifier of the data job", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2994,7 +2994,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -3607,7 +3607,7 @@ "Searchable" : { "boostScore" : 4.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "orchestrator", @@ -3713,7 +3713,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -4312,7 +4312,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4401,7 +4401,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4496,7 +4496,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4603,7 +4603,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -4710,7 +4710,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -4792,7 +4792,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4811,7 +4811,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -4866,7 +4866,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4895,7 +4895,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "definition", @@ -5073,7 +5073,7 @@ "type" : "string", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -5113,7 +5113,7 @@ "enableAutocomplete" : 
true, "fieldName" : "displayName", "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "id", @@ -6252,6 +6252,11 @@ "doc" : "The run id that produced the metadata. Populated in case of batch-ingestion.", "default" : "no-run-id-provided", "optional" : true + }, { + "name" : "pipelineName", + "type" : "string", + "doc" : "The ingestion pipeline id that produced the metadata. Populated in case of batch ingestion.", + "optional" : true }, { "name" : "registryName", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json index de65aa841876f..0b31bf9683d0c 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json @@ -117,6 +117,11 @@ "doc" : "The run id that produced the metadata. Populated in case of batch-ingestion.", "default" : "no-run-id-provided", "optional" : true + }, { + "name" : "pipelineName", + "type" : "string", + "doc" : "The ingestion pipeline id that produced the metadata. Populated in case of batch ingestion.", + "optional" : true }, { "name" : "registryName", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json index b7bcd8db99691..24a4ec2cc6802 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json @@ -126,6 +126,11 @@ "doc" : "The run id that produced the metadata. Populated in case of batch-ingestion.", "default" : "no-run-id-provided", "optional" : true + }, { + "name" : "pipelineName", + "type" : "string", + "doc" : "The ingestion pipeline id that produced the metadata. 
Populated in case of batch ingestion.", + "optional" : true }, { "name" : "registryName", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index ffaefc8232e83..b20953749ac35 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -95,7 +95,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1034,7 +1034,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1161,7 +1161,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1221,7 +1221,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1623,7 +1623,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "qualifiedName", @@ -1634,7 +1634,7 @@ "addToFilters" : false, "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1812,7 +1812,7 @@ "enableAutocomplete" : true, "fieldName" : "displayName", "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "id", @@ -1849,7 +1849,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "definition", @@ -2041,7 +2041,7 @@ "optional" : true, "Searchable" : { "boostScore" : 10.0, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2094,7 +2094,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2157,7 +2157,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2250,7 +2250,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } } ], @@ -2270,7 +2270,7 @@ "boostScore" : 2.0, "enableAutocomplete" : true, "fieldName" : "ldap", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2316,7 +2316,7 @@ "doc" : "Unique Identifier of the data flow", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "cluster", @@ -2353,7 +2353,7 @@ "doc" : "Unique Identifier of the data job", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2928,7 +2928,7 @@ "type" : "string", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } 
], "Aspect" : { @@ -2946,7 +2946,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2972,7 +2972,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3038,7 +3038,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -3605,7 +3605,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3624,7 +3624,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index e385c7c30b21a..e29dd6809b968 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -95,7 +95,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1034,7 +1034,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1161,7 +1161,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1221,7 +1221,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1623,7 +1623,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "qualifiedName", @@ -1634,7 +1634,7 @@ "addToFilters" : false, "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1806,7 +1806,7 @@ "enableAutocomplete" : true, "fieldName" : "displayName", "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "id", @@ -1843,7 +1843,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "definition", @@ -2035,7 +2035,7 @@ "optional" : true, "Searchable" : { "boostScore" : 10.0, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2088,7 +2088,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2151,7 +2151,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2244,7 +2244,7 @@ 
"Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } } ], @@ -2264,7 +2264,7 @@ "boostScore" : 2.0, "enableAutocomplete" : true, "fieldName" : "ldap", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2310,7 +2310,7 @@ "doc" : "Unique Identifier of the data flow", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "cluster", @@ -2347,7 +2347,7 @@ "doc" : "Unique Identifier of the data job", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2922,7 +2922,7 @@ "type" : "string", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2940,7 +2940,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2966,7 +2966,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3032,7 +3032,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -3599,7 +3599,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -3618,7 +3618,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index b85c84be23795..8391af60f8ece 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -95,7 +95,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1328,7 +1328,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1474,7 +1474,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1534,7 +1534,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -1927,7 +1927,7 @@ "boostScore" : 10.0, "enableAutocomplete" : false, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" }, "validate" : { "strlen" : { @@ -1942,7 +1942,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "type", @@ -2117,7 +2117,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : 
"TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "qualifiedName", @@ -2128,7 +2128,7 @@ "addToFilters" : false, "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -2417,7 +2417,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } } ], @@ -2556,7 +2556,7 @@ "boostScore" : 2.0, "enableAutocomplete" : true, "fieldName" : "ldap", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2587,7 +2587,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2650,7 +2650,7 @@ "Searchable" : { "boostScore" : 10.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2711,7 +2711,7 @@ "optional" : true, "Searchable" : { "boostScore" : 10.0, - "fieldType" : "TEXT_PARTIAL", + "fieldType" : "WORD_GRAM", "queryByDefault" : true } }, { @@ -2872,7 +2872,7 @@ "doc" : "Unique Identifier of the data flow", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "cluster", @@ -2935,7 +2935,7 @@ "doc" : "Unique Identifier of the data job", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -2988,7 +2988,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -3601,7 +3601,7 @@ "Searchable" : { "boostScore" : 4.0, "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "orchestrator", @@ -3707,7 +3707,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -4306,7 +4306,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4395,7 +4395,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4490,7 +4490,7 @@ "boostScore" : 8.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4597,7 +4597,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -4704,7 +4704,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "origin", @@ -4786,7 +4786,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4805,7 +4805,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "description", @@ -4860,7 +4860,7 @@ "Searchable" : { "enableAutocomplete" : true, "fieldName" : "id", - "fieldType" : 
"TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -4889,7 +4889,7 @@ "boostScore" : 10.0, "enableAutocomplete" : true, "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "definition", @@ -5067,7 +5067,7 @@ "type" : "string", "Searchable" : { "enableAutocomplete" : true, - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } } ], "Aspect" : { @@ -5107,7 +5107,7 @@ "enableAutocomplete" : true, "fieldName" : "displayName", "fieldNameAliases" : [ "_entityName" ], - "fieldType" : "TEXT_PARTIAL" + "fieldType" : "WORD_GRAM" } }, { "name" : "id", From e867dbc3daa9e946362c53aa4beebb8c0621faad Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 31 Aug 2023 02:38:42 +0530 Subject: [PATCH 24/41] ci: separate airflow build and test (#8688) Co-authored-by: Harshal Sheth --- .github/workflows/airflow-plugin.yml | 85 +++ .github/workflows/metadata-ingestion.yml | 7 +- .github/workflows/test-results.yml | 2 +- docs/lineage/airflow.md | 6 +- .../airflow-plugin/build.gradle | 59 +- .../airflow-plugin/pyproject.toml | 1 - .../airflow-plugin/setup.cfg | 4 +- .../airflow-plugin/setup.py | 24 +- .../datahub_airflow_plugin/_airflow_compat.py | 12 + .../datahub_airflow_plugin/_airflow_shims.py | 29 + .../datahub_airflow_plugin/_lineage_core.py | 115 ++++ .../client}/__init__.py | 0 .../client/airflow_generator.py | 512 ++++++++++++++++++ .../datahub_airflow_plugin/datahub_plugin.py | 371 ++++++++++++- .../src/datahub_airflow_plugin/entities.py | 47 ++ .../example_dags/.airflowignore | 0 .../example_dags/__init__.py | 0 .../example_dags/generic_recipe_sample_dag.py | 2 +- .../example_dags/lineage_backend_demo.py | 3 +- .../lineage_backend_taskflow_demo.py | 3 +- .../example_dags/lineage_emission_dag.py | 5 +- .../example_dags/mysql_sample_dag.py | 1 + .../example_dags/snowflake_sample_dag.py | 1 + .../datahub_airflow_plugin/hooks/__init__.py | 0 .../datahub_airflow_plugin/hooks/datahub.py | 214 ++++++++ .../lineage/__init__.py | 0 .../datahub_airflow_plugin/lineage/datahub.py | 91 ++++ .../operators/__init__.py | 0 .../operators/datahub.py | 63 +++ .../operators/datahub_assertion_operator.py | 78 +++ .../operators/datahub_assertion_sensor.py | 78 +++ .../operators/datahub_operation_operator.py | 97 ++++ .../operators/datahub_operation_sensor.py | 100 ++++ .../tests/unit/test_airflow.py | 16 +- metadata-ingestion/developing.md | 12 +- metadata-ingestion/schedule_docs/airflow.md | 6 +- metadata-ingestion/setup.cfg | 3 - metadata-ingestion/setup.py | 10 +- .../src/datahub_provider/__init__.py | 29 +- .../src/datahub_provider/_airflow_compat.py | 13 +- .../src/datahub_provider/_airflow_shims.py | 34 +- .../src/datahub_provider/_lineage_core.py | 115 +--- .../src/datahub_provider/_plugin.py | 369 +------------ .../client/airflow_generator.py | 510 +---------------- .../src/datahub_provider/entities.py | 49 +- .../src/datahub_provider/hooks/datahub.py | 220 +------- .../src/datahub_provider/lineage/datahub.py | 93 +--- .../src/datahub_provider/operators/datahub.py | 65 +-- .../operators/datahub_assertion_operator.py | 79 +-- .../operators/datahub_assertion_sensor.py | 79 +-- .../operators/datahub_operation_operator.py | 98 +--- .../operators/datahub_operation_sensor.py | 101 +--- 52 files changed, 2037 insertions(+), 1874 deletions(-) create mode 100644 .github/workflows/airflow-plugin.yml create mode 100644 
metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_compat.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_lineage_core.py rename {metadata-ingestion/src/datahub_provider/example_dags => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client}/__init__.py (100%) create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py rename {metadata-ingestion/src/datahub_provider => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin}/example_dags/.airflowignore (100%) rename .github/workflows/docker-ingestion-base.yml => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/__init__.py (100%) rename {metadata-ingestion/src/datahub_provider => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin}/example_dags/generic_recipe_sample_dag.py (98%) rename {metadata-ingestion/src/datahub_provider => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin}/example_dags/lineage_backend_demo.py (94%) rename {metadata-ingestion/src/datahub_provider => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin}/example_dags/lineage_backend_taskflow_demo.py (94%) rename {metadata-ingestion/src/datahub_provider => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin}/example_dags/lineage_emission_dag.py (96%) rename {metadata-ingestion/src/datahub_provider => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin}/example_dags/mysql_sample_dag.py (98%) rename {metadata-ingestion/src/datahub_provider => metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin}/example_dags/snowflake_sample_dag.py (99%) create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/__init__.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/__init__.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/__init__.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_operator.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_sensor.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_operator.py create mode 100644 metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_sensor.py rename {metadata-ingestion => metadata-ingestion-modules/airflow-plugin}/tests/unit/test_airflow.py (97%) diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml new file mode 100644 index 0000000000000..63bab821cc398 --- /dev/null +++ b/.github/workflows/airflow-plugin.yml @@ -0,0 +1,85 @@ +name: Airflow Plugin +on: + push: + branches: + - master + paths: + - 
".github/workflows/airflow-plugin.yml" + - "metadata-ingestion-modules/airflow-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + pull_request: + branches: + - master + paths: + - ".github/**" + - "metadata-ingestion-modules/airflow-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + release: + types: [published] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + airflow-plugin: + runs-on: ubuntu-latest + env: + SPARK_VERSION: 3.0.3 + DATAHUB_TELEMETRY_ENABLED: false + strategy: + matrix: + include: + - python-version: "3.7" + extraPythonRequirement: "apache-airflow~=2.1.0" + - python-version: "3.7" + extraPythonRequirement: "apache-airflow~=2.2.0" + - python-version: "3.10" + extraPythonRequirement: "apache-airflow~=2.4.0" + - python-version: "3.10" + extraPythonRequirement: "apache-airflow~=2.6.0" + - python-version: "3.10" + extraPythonRequirement: "apache-airflow>2.6.0" + fail-fast: false + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - name: Install dependencies + run: ./metadata-ingestion/scripts/install_deps.sh + - name: Install airflow package and test (extras ${{ matrix.extraPythonRequirement }}) + run: ./gradlew -Pextra_pip_requirements='${{ matrix.extraPythonRequirement }}' :metadata-ingestion-modules:airflow-plugin:lint :metadata-ingestion-modules:airflow-plugin:testQuick + - name: pip freeze show list installed + if: always() + run: source metadata-ingestion-modules/airflow-plugin/venv/bin/activate && pip freeze + - uses: actions/upload-artifact@v3 + if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'apache-airflow>2.6.0' }} + with: + name: Test Results (Airflow Plugin ${{ matrix.python-version}}) + path: | + **/build/reports/tests/test/** + **/build/test-results/test/** + **/junit.*.xml + - name: Upload coverage to Codecov + if: always() + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + directory: . 
+ fail_ci_if_error: false + flags: airflow-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} + name: pytest-airflow + verbose: true + + event-file: + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v3 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index fb70c85fdec93..fff41e481c3cb 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -42,9 +42,7 @@ jobs: ] include: - python-version: "3.7" - extraPythonRequirement: "sqlalchemy==1.3.24 apache-airflow~=2.2.0" - python-version: "3.10" - extraPythonRequirement: "sqlalchemy~=1.4.0 apache-airflow>=2.4.0" fail-fast: false steps: - uses: actions/checkout@v3 @@ -56,8 +54,8 @@ jobs: run: ./metadata-ingestion/scripts/install_deps.sh - name: Install package run: ./gradlew :metadata-ingestion:installPackageOnly - - name: Run metadata-ingestion tests (extras ${{ matrix.extraPythonRequirement }}) - run: ./gradlew -Pextra_pip_requirements='${{ matrix.extraPythonRequirement }}' :metadata-ingestion:${{ matrix.command }} + - name: Run metadata-ingestion tests + run: ./gradlew :metadata-ingestion:${{ matrix.command }} - name: pip freeze show list installed if: always() run: source metadata-ingestion/venv/bin/activate && pip freeze @@ -80,7 +78,6 @@ jobs: name: pytest-${{ matrix.command }} verbose: true - event-file: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/test-results.yml b/.github/workflows/test-results.yml index 656e4dcbc4e43..0153060692271 100644 --- a/.github/workflows/test-results.yml +++ b/.github/workflows/test-results.yml @@ -2,7 +2,7 @@ name: Test Results on: workflow_run: - workflows: ["build & test", "metadata ingestion"] + workflows: ["build & test", "metadata ingestion", "Airflow Plugin"] types: - completed diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 21d59b777dd7c..49de5352f6d58 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -65,7 +65,7 @@ lazy_load_plugins = False | datahub.capture_executions | true | If true, we'll capture task runs in DataHub in addition to DAG definitions. | | datahub.graceful_exceptions | true | If set to true, most runtime errors in the lineage backend will be suppressed and will not cause the overall task to fail. Note that configuration issues will still throw exceptions. | -5. Configure `inlets` and `outlets` for your Airflow operators. For reference, look at the sample DAG in [`lineage_backend_demo.py`](../../metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_demo.py), or reference [`lineage_backend_taskflow_demo.py`](../../metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_taskflow_demo.py) if you're using the [TaskFlow API](https://airflow.apache.org/docs/apache-airflow/stable/concepts/taskflow.html). +5. Configure `inlets` and `outlets` for your Airflow operators. For reference, look at the sample DAG in [`lineage_backend_demo.py`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py), or reference [`lineage_backend_taskflow_demo.py`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py) if you're using the [TaskFlow API](https://airflow.apache.org/docs/apache-airflow/stable/concepts/taskflow.html). 6. 
[optional] Learn more about [Airflow lineage](https://airflow.apache.org/docs/apache-airflow/stable/lineage.html), including shorthand notation and some automation. ### How to validate installation @@ -160,14 +160,14 @@ pip install acryl-datahub[airflow,datahub-kafka] - `capture_executions` (defaults to false): If true, it captures task runs as DataHub DataProcessInstances. - `graceful_exceptions` (defaults to true): If set to true, most runtime errors in the lineage backend will be suppressed and will not cause the overall task to fail. Note that configuration issues will still throw exceptions. -4. Configure `inlets` and `outlets` for your Airflow operators. For reference, look at the sample DAG in [`lineage_backend_demo.py`](../../metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_demo.py), or reference [`lineage_backend_taskflow_demo.py`](../../metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_taskflow_demo.py) if you're using the [TaskFlow API](https://airflow.apache.org/docs/apache-airflow/stable/concepts/taskflow.html). +4. Configure `inlets` and `outlets` for your Airflow operators. For reference, look at the sample DAG in [`lineage_backend_demo.py`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py), or reference [`lineage_backend_taskflow_demo.py`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py) if you're using the [TaskFlow API](https://airflow.apache.org/docs/apache-airflow/stable/concepts/taskflow.html). 5. [optional] Learn more about [Airflow lineage](https://airflow.apache.org/docs/apache-airflow/stable/lineage.html), including shorthand notation and some automation. ## Emitting lineage via a separate operator Take a look at this sample DAG: -- [`lineage_emission_dag.py`](../../metadata-ingestion/src/datahub_provider/example_dags/lineage_emission_dag.py) - emits lineage using the DatahubEmitterOperator. +- [`lineage_emission_dag.py`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_emission_dag.py) - emits lineage using the DatahubEmitterOperator. In order to use this example, you must first configure the Datahub hook. Like in ingestion, we support a Datahub REST hook and a Kafka-based hook. See step 1 above for details. 
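For reference, a minimal sketch of such a DAG. This snippet is not part of the patch itself: the connection id, dataset names, and DAG id below are placeholders, and the import path assumes the `datahub_airflow_plugin` package layout introduced in this commit (older installs expose the same operator under `datahub_provider.operators.datahub`).

```python
from datetime import datetime

import datahub.emitter.mce_builder as builder
from airflow import DAG

from datahub_airflow_plugin.operators.datahub import DatahubEmitterOperator

with DAG(
    dag_id="datahub_lineage_emission_example",  # placeholder name
    start_date=datetime(2023, 1, 1),
    schedule_interval=None,  # accepted across Airflow 2.1-2.6; newer versions prefer `schedule`
) as dag:
    # Emit a single lineage MCE declaring table_b downstream of table_a.
    emit_lineage_task = DatahubEmitterOperator(
        task_id="emit_lineage",
        datahub_conn_id="datahub_rest_default",  # the hook configured in step 1
        mces=[
            builder.make_lineage_mce(
                upstream_urns=[
                    builder.make_dataset_urn("snowflake", "mydb.schema.table_a"),
                ],
                downstream_urn=builder.make_dataset_urn(
                    "snowflake", "mydb.schema.table_b"
                ),
            )
        ],
    )
```

Because the operator takes ready-made MCEs, it can emit any metadata, not just lineage; lineage is simply the common case this DAG covers.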
diff --git a/metadata-ingestion-modules/airflow-plugin/build.gradle b/metadata-ingestion-modules/airflow-plugin/build.gradle index 336be8fc94d44..d1e6f2f646491 100644 --- a/metadata-ingestion-modules/airflow-plugin/build.gradle +++ b/metadata-ingestion-modules/airflow-plugin/build.gradle @@ -7,6 +7,10 @@ ext { venv_name = 'venv' } +if (!project.hasProperty("extra_pip_requirements")) { + ext.extra_pip_requirements = "" +} + def pip_install_command = "${venv_name}/bin/pip install -e ../../metadata-ingestion" task checkPythonVersion(type: Exec) { @@ -14,30 +18,37 @@ task checkPythonVersion(type: Exec) { } task environmentSetup(type: Exec, dependsOn: checkPythonVersion) { + def sentinel_file = "${venv_name}/.venv_environment_sentinel" inputs.file file('setup.py') - outputs.dir("${venv_name}") - commandLine 'bash', '-c', "${python_executable} -m venv ${venv_name} && ${venv_name}/bin/python -m pip install --upgrade pip wheel 'setuptools>=63.0.0'" + outputs.file(sentinel_file) + commandLine 'bash', '-c', + "${python_executable} -m venv ${venv_name} &&" + + "${venv_name}/bin/python -m pip install --upgrade pip wheel 'setuptools>=63.0.0' && " + + "touch ${sentinel_file}" } -task installPackage(type: Exec, dependsOn: environmentSetup) { +task installPackage(type: Exec, dependsOn: [environmentSetup, ':metadata-ingestion:codegen']) { + def sentinel_file = "${venv_name}/.build_install_package_sentinel" inputs.file file('setup.py') - outputs.dir("${venv_name}") + outputs.file(sentinel_file) // Workaround for https://github.com/yaml/pyyaml/issues/601. // See https://github.com/yaml/pyyaml/issues/601#issuecomment-1638509577. // and https://github.com/datahub-project/datahub/pull/8435. commandLine 'bash', '-x', '-c', "${pip_install_command} install 'Cython<3.0' 'PyYAML<6' --no-build-isolation && " + - "${pip_install_command} -e ." + "${pip_install_command} -e . ${extra_pip_requirements} &&" + + "touch ${sentinel_file}" } task install(dependsOn: [installPackage]) task installDev(type: Exec, dependsOn: [install]) { + def sentinel_file = "${venv_name}/.build_install_dev_sentinel" inputs.file file('setup.py') - outputs.dir("${venv_name}") - outputs.file("${venv_name}/.build_install_dev_sentinel") + outputs.file("${sentinel_file}") commandLine 'bash', '-x', '-c', - "${pip_install_command} -e .[dev] && touch ${venv_name}/.build_install_dev_sentinel" + "${pip_install_command} -e .[dev] ${extra_pip_requirements} && " + + "touch ${sentinel_file}" } task lint(type: Exec, dependsOn: installDev) { @@ -45,9 +56,13 @@ task lint(type: Exec, dependsOn: installDev) { The find/sed combo below is a temporary work-around for the following mypy issue with airflow 2.2.0: "venv/lib/python3.8/site-packages/airflow/_vendor/connexion/spec.py:169: error: invalid syntax". 
*/ - commandLine 'bash', '-x', '-c', + commandLine 'bash', '-c', "find ${venv_name}/lib -path *airflow/_vendor/connexion/spec.py -exec sed -i.bak -e '169,169s/ # type: List\\[str\\]//g' {} \\; && " + - "source ${venv_name}/bin/activate && black --check --diff src/ tests/ && isort --check --diff src/ tests/ && flake8 --count --statistics src/ tests/ && mypy src/ tests/" + "source ${venv_name}/bin/activate && set -x && " + + "black --check --diff src/ tests/ && " + + "isort --check --diff src/ tests/ && " + + "flake8 --count --statistics src/ tests/ && " + + "mypy --show-traceback --show-error-codes src/ tests/" } task lintFix(type: Exec, dependsOn: installDev) { commandLine 'bash', '-x', '-c', @@ -58,21 +73,13 @@ task lintFix(type: Exec, dependsOn: installDev) { "mypy src/ tests/ " } -task testQuick(type: Exec, dependsOn: installDev) { - // We can't enforce the coverage requirements if we run a subset of the tests. - inputs.files(project.fileTree(dir: "src/", include: "**/*.py")) - inputs.files(project.fileTree(dir: "tests/")) - outputs.dir("${venv_name}") - commandLine 'bash', '-x', '-c', - "source ${venv_name}/bin/activate && pytest -vv --continue-on-collection-errors --junit-xml=junit.quick.xml" -} - task installDevTest(type: Exec, dependsOn: [installDev]) { + def sentinel_file = "${venv_name}/.build_install_dev_test_sentinel" inputs.file file('setup.py') outputs.dir("${venv_name}") - outputs.file("${venv_name}/.build_install_dev_test_sentinel") + outputs.file("${sentinel_file}") commandLine 'bash', '-x', '-c', - "${pip_install_command} -e .[dev,integration-tests] && touch ${venv_name}/.build_install_dev_test_sentinel" + "${pip_install_command} -e .[dev,integration-tests] && touch ${sentinel_file}" } def testFile = hasProperty('testFile') ? testFile : 'unknown' @@ -89,6 +96,16 @@ task testSingle(dependsOn: [installDevTest]) { } } +task testQuick(type: Exec, dependsOn: installDevTest) { + // We can't enforce the coverage requirements if we run a subset of the tests. 
+ inputs.files(project.fileTree(dir: "src/", include: "**/*.py")) + inputs.files(project.fileTree(dir: "tests/")) + outputs.dir("${venv_name}") + commandLine 'bash', '-x', '-c', + "source ${venv_name}/bin/activate && pytest -vv --continue-on-collection-errors --junit-xml=junit.quick.xml" +} + + task testFull(type: Exec, dependsOn: [testQuick, installDevTest]) { commandLine 'bash', '-x', '-c', "source ${venv_name}/bin/activate && pytest -m 'not slow_integration' -vv --continue-on-collection-errors --junit-xml=junit.full.xml" diff --git a/metadata-ingestion-modules/airflow-plugin/pyproject.toml b/metadata-ingestion-modules/airflow-plugin/pyproject.toml index 83b79e3146176..fba81486b9f67 100644 --- a/metadata-ingestion-modules/airflow-plugin/pyproject.toml +++ b/metadata-ingestion-modules/airflow-plugin/pyproject.toml @@ -9,7 +9,6 @@ extend-exclude = ''' ^/tmp ''' include = '\.pyi?$' -target-version = ['py36', 'py37', 'py38'] [tool.isort] indent = ' ' diff --git a/metadata-ingestion-modules/airflow-plugin/setup.cfg b/metadata-ingestion-modules/airflow-plugin/setup.cfg index c9a2ba93e9933..157bcce1c298d 100644 --- a/metadata-ingestion-modules/airflow-plugin/setup.cfg +++ b/metadata-ingestion-modules/airflow-plugin/setup.cfg @@ -69,4 +69,6 @@ exclude_lines = pragma: no cover @abstract if TYPE_CHECKING: -#omit = +omit = + # omit example dags + src/datahub_airflow_plugin/example_dags/* diff --git a/metadata-ingestion-modules/airflow-plugin/setup.py b/metadata-ingestion-modules/airflow-plugin/setup.py index c2571916ca5d0..c5bdc7ea329cd 100644 --- a/metadata-ingestion-modules/airflow-plugin/setup.py +++ b/metadata-ingestion-modules/airflow-plugin/setup.py @@ -13,16 +13,21 @@ def get_long_description(): return pathlib.Path(os.path.join(root, "README.md")).read_text() +rest_common = {"requests", "requests_file"} + base_requirements = { # Compatibility. "dataclasses>=0.6; python_version < '3.7'", - "typing_extensions>=3.10.0.2", + # Typing extension should be >=3.10.0.2 ideally but we can't restrict due to Airflow 2.0.2 dependency conflict + "typing_extensions>=3.7.4.3 ; python_version < '3.8'", + "typing_extensions>=3.10.0.2,<4.6.0 ; python_version >= '3.8'", "mypy_extensions>=0.4.3", # Actual dependencies. "typing-inspect", "pydantic>=1.5.1", "apache-airflow >= 2.0.2", - f"acryl-datahub[airflow] == {package_metadata['__version__']}", + *rest_common, + f"acryl-datahub == {package_metadata['__version__']}", } @@ -47,19 +52,18 @@ def get_long_description(): base_dev_requirements = { *base_requirements, *mypy_stubs, - "black>=21.12b0", + "black==22.12.0", "coverage>=5.1", "flake8>=3.8.3", "flake8-tidy-imports>=4.3.0", "isort>=5.7.0", - "mypy>=0.920", + "mypy>=1.4.0", # pydantic 1.8.2 is incompatible with mypy 0.910. # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910. - "pydantic>=1.9.0", + "pydantic>=1.10", "pytest>=6.2.2", "pytest-asyncio>=0.16.0", "pytest-cov>=2.8.1", - "pytest-docker>=0.10.3,<0.12", "tox", "deepdiff", "requests-mock", @@ -127,5 +131,13 @@ def get_long_description(): "datahub-kafka": [ f"acryl-datahub[datahub-kafka] == {package_metadata['__version__']}" ], + "integration-tests": [ + f"acryl-datahub[datahub-kafka] == {package_metadata['__version__']}", + # Extra requirements for Airflow. + "apache-airflow[snowflake]>=2.0.2", # snowflake is used in example dags + # Because of https://github.com/snowflakedb/snowflake-sqlalchemy/issues/350 we need to restrict SQLAlchemy's max version. 
+ "SQLAlchemy<1.4.42", + "virtualenv", # needed by PythonVirtualenvOperator + ], }, ) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_compat.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_compat.py new file mode 100644 index 0000000000000..67c3348ec987c --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_compat.py @@ -0,0 +1,12 @@ +# This module must be imported before any Airflow imports in any of our files. +# The AIRFLOW_PATCHED just helps avoid flake8 errors. + +from datahub.utilities._markupsafe_compat import MARKUPSAFE_PATCHED + +assert MARKUPSAFE_PATCHED + +AIRFLOW_PATCHED = True + +__all__ = [ + "AIRFLOW_PATCHED", +] diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py new file mode 100644 index 0000000000000..5ad20e1f72551 --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py @@ -0,0 +1,29 @@ +from airflow.models.baseoperator import BaseOperator + +from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED + +try: + from airflow.models.mappedoperator import MappedOperator + from airflow.models.operator import Operator + from airflow.operators.empty import EmptyOperator +except ModuleNotFoundError: + # Operator isn't a real class, but rather a type alias defined + # as the union of BaseOperator and MappedOperator. + # Since older versions of Airflow don't have MappedOperator, we can just use BaseOperator. + Operator = BaseOperator # type: ignore + MappedOperator = None # type: ignore + from airflow.operators.dummy import DummyOperator as EmptyOperator # type: ignore + +try: + from airflow.sensors.external_task import ExternalTaskSensor +except ImportError: + from airflow.sensors.external_task_sensor import ExternalTaskSensor # type: ignore + +assert AIRFLOW_PATCHED + +__all__ = [ + "Operator", + "MappedOperator", + "EmptyOperator", + "ExternalTaskSensor", +] diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_lineage_core.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_lineage_core.py new file mode 100644 index 0000000000000..d91c039ffa718 --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_lineage_core.py @@ -0,0 +1,115 @@ +from datetime import datetime +from typing import TYPE_CHECKING, Dict, List + +import datahub.emitter.mce_builder as builder +from datahub.api.entities.dataprocess.dataprocess_instance import InstanceRunResult +from datahub.configuration.common import ConfigModel +from datahub.utilities.urns.dataset_urn import DatasetUrn + +from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator +from datahub_airflow_plugin.entities import _Entity + +if TYPE_CHECKING: + from airflow import DAG + from airflow.models.dagrun import DagRun + from airflow.models.taskinstance import TaskInstance + + from datahub_airflow_plugin._airflow_shims import Operator + from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook + + +def _entities_to_urn_list(iolets: List[_Entity]) -> List[DatasetUrn]: + return [DatasetUrn.create_from_string(let.urn) for let in iolets] + + +class DatahubBasicLineageConfig(ConfigModel): + enabled: bool = True + + # DataHub hook connection ID. + datahub_conn_id: str + + # Cluster to associate with the pipelines and tasks. 
Defaults to "prod". + cluster: str = builder.DEFAULT_FLOW_CLUSTER + + # If true, the owners field of the DAG will be capture as a DataHub corpuser. + capture_ownership_info: bool = True + + # If true, the tags field of the DAG will be captured as DataHub tags. + capture_tags_info: bool = True + + capture_executions: bool = False + + def make_emitter_hook(self) -> "DatahubGenericHook": + # This is necessary to avoid issues with circular imports. + from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook + + return DatahubGenericHook(self.datahub_conn_id) + + +def send_lineage_to_datahub( + config: DatahubBasicLineageConfig, + operator: "Operator", + inlets: List[_Entity], + outlets: List[_Entity], + context: Dict, +) -> None: + if not config.enabled: + return + + dag: "DAG" = context["dag"] + task: "Operator" = context["task"] + ti: "TaskInstance" = context["task_instance"] + + hook = config.make_emitter_hook() + emitter = hook.make_emitter() + + dataflow = AirflowGenerator.generate_dataflow( + cluster=config.cluster, + dag=dag, + capture_tags=config.capture_tags_info, + capture_owner=config.capture_ownership_info, + ) + dataflow.emit(emitter) + operator.log.info(f"Emitted from Lineage: {dataflow}") + + datajob = AirflowGenerator.generate_datajob( + cluster=config.cluster, + task=task, + dag=dag, + capture_tags=config.capture_tags_info, + capture_owner=config.capture_ownership_info, + ) + datajob.inlets.extend(_entities_to_urn_list(inlets)) + datajob.outlets.extend(_entities_to_urn_list(outlets)) + + datajob.emit(emitter) + operator.log.info(f"Emitted from Lineage: {datajob}") + + if config.capture_executions: + dag_run: "DagRun" = context["dag_run"] + + dpi = AirflowGenerator.run_datajob( + emitter=emitter, + cluster=config.cluster, + ti=ti, + dag=dag, + dag_run=dag_run, + datajob=datajob, + emit_templates=False, + ) + + operator.log.info(f"Emitted from Lineage: {dpi}") + + dpi = AirflowGenerator.complete_datajob( + emitter=emitter, + cluster=config.cluster, + ti=ti, + dag=dag, + dag_run=dag_run, + datajob=datajob, + result=InstanceRunResult.SUCCESS, + end_timestamp_millis=int(datetime.utcnow().timestamp() * 1000), + ) + operator.log.info(f"Emitted from Lineage: {dpi}") + + emitter.flush() diff --git a/metadata-ingestion/src/datahub_provider/example_dags/__init__.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/__init__.py similarity index 100% rename from metadata-ingestion/src/datahub_provider/example_dags/__init__.py rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/__init__.py diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py new file mode 100644 index 0000000000000..b5e86e14d85d0 --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/client/airflow_generator.py @@ -0,0 +1,512 @@ +from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast + +from airflow.configuration import conf +from datahub.api.entities.datajob import DataFlow, DataJob +from datahub.api.entities.dataprocess.dataprocess_instance import ( + DataProcessInstance, + InstanceRunResult, +) +from datahub.metadata.schema_classes import DataProcessTypeClass +from datahub.utilities.urns.data_flow_urn import DataFlowUrn +from datahub.utilities.urns.data_job_urn import DataJobUrn + +from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED 
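+# (The assert below exists so that linters don't flag the compat import as unused;
+# the import itself applies the markupsafe patch as a side effect.)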
+
+assert AIRFLOW_PATCHED
+
+if TYPE_CHECKING:
+    from airflow import DAG
+    from airflow.models import DagRun, TaskInstance
+    from datahub.emitter.kafka_emitter import DatahubKafkaEmitter
+    from datahub.emitter.rest_emitter import DatahubRestEmitter
+
+    from datahub_airflow_plugin._airflow_shims import Operator
+
+
+def _task_downstream_task_ids(operator: "Operator") -> Set[str]:
+    if hasattr(operator, "downstream_task_ids"):
+        return operator.downstream_task_ids
+    return operator._downstream_task_id  # type: ignore[attr-defined,union-attr]
+
+
+class AirflowGenerator:
+    @staticmethod
+    def _get_dependencies(
+        task: "Operator", dag: "DAG", flow_urn: DataFlowUrn
+    ) -> List[DataJobUrn]:
+        from datahub_airflow_plugin._airflow_shims import ExternalTaskSensor
+
+        # resolve URNs for upstream nodes in subdags upstream of the current task.
+        upstream_subdag_task_urns: List[DataJobUrn] = []
+
+        for upstream_task_id in task.upstream_task_ids:
+            upstream_task = dag.task_dict[upstream_task_id]
+
+            # if upstream task is not a subdag, then skip it
+            upstream_subdag = getattr(upstream_task, "subdag", None)
+            if upstream_subdag is None:
+                continue
+
+            # else, link the leaf tasks of the upstream subdag as upstream tasks
+            for upstream_subdag_task_id in upstream_subdag.task_dict:
+                upstream_subdag_task = upstream_subdag.task_dict[
+                    upstream_subdag_task_id
+                ]
+
+                upstream_subdag_task_urn = DataJobUrn.create_from_ids(
+                    job_id=upstream_subdag_task_id, data_flow_urn=str(flow_urn)
+                )
+
+                # if subdag task is a leaf task, then link it as an upstream task
+                if len(_task_downstream_task_ids(upstream_subdag_task)) == 0:
+                    upstream_subdag_task_urns.append(upstream_subdag_task_urn)
+
+        # resolve URNs for upstream nodes that trigger the subdag containing the current task.
+        # (if it is in a subdag at all)
+        upstream_subdag_triggers: List[DataJobUrn] = []
+
+        # subdags are always named with 'parent.child' style or Airflow won't run them
+        # add connection from subdag trigger(s) if subdag task has no upstreams
+        if (
+            dag.is_subdag
+            and dag.parent_dag is not None
+            and len(task.upstream_task_ids) == 0
+        ):
+            # filter through the parent dag's tasks and find the subdag trigger(s)
+            subdags = [
+                x for x in dag.parent_dag.task_dict.values() if x.subdag is not None
+            ]
+            matched_subdags = [
+                x for x in subdags if x.subdag and x.subdag.dag_id == dag.dag_id
+            ]
+
+            # id of the task containing the subdag
+            subdag_task_id = matched_subdags[0].task_id
+
+            # iterate through the parent dag's tasks and find the ones that trigger the subdag
+            for upstream_task_id in dag.parent_dag.task_dict:
+                upstream_task = dag.parent_dag.task_dict[upstream_task_id]
+                upstream_task_urn = DataJobUrn.create_from_ids(
+                    data_flow_urn=str(flow_urn), job_id=upstream_task_id
+                )
+
+                # if the task triggers the subdag, link it to this node in the subdag
+                if subdag_task_id in _task_downstream_task_ids(upstream_task):
+                    upstream_subdag_triggers.append(upstream_task_urn)
+
+        # If the operator is an ExternalTaskSensor then we set the remote task as upstream.
+        # It is possible to tie an external sensor to a DAG if external_task_id is omitted,
+        # but currently we can't tie one jobflow to another jobflow.
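+        # For example (ids are illustrative): a sensor in dag "load" waiting on task
+        # "export" of dag "extract" would add an upstream urn like
+        #   urn:li:dataJob:(urn:li:dataFlow:(airflow,extract,prod),export)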
+ external_task_upstreams = [] + if task.task_type == "ExternalTaskSensor": + task = cast(ExternalTaskSensor, task) + if hasattr(task, "external_task_id") and task.external_task_id is not None: + external_task_upstreams = [ + DataJobUrn.create_from_ids( + job_id=task.external_task_id, + data_flow_urn=str( + DataFlowUrn.create_from_ids( + orchestrator=flow_urn.get_orchestrator_name(), + flow_id=task.external_dag_id, + env=flow_urn.get_env(), + ) + ), + ) + ] + # exclude subdag operator tasks since these are not emitted, resulting in empty metadata + upstream_tasks = ( + [ + DataJobUrn.create_from_ids(job_id=task_id, data_flow_urn=str(flow_urn)) + for task_id in task.upstream_task_ids + if getattr(dag.task_dict[task_id], "subdag", None) is None + ] + + upstream_subdag_task_urns + + upstream_subdag_triggers + + external_task_upstreams + ) + return upstream_tasks + + @staticmethod + def generate_dataflow( + cluster: str, + dag: "DAG", + capture_owner: bool = True, + capture_tags: bool = True, + ) -> DataFlow: + """ + Generates a Dataflow object from an Airflow DAG + :param cluster: str - name of the cluster + :param dag: DAG - + :param capture_tags: + :param capture_owner: + :return: DataFlow - Data generated dataflow + """ + id = dag.dag_id + orchestrator = "airflow" + description = f"{dag.description}\n\n{dag.doc_md or ''}" + data_flow = DataFlow( + env=cluster, id=id, orchestrator=orchestrator, description=description + ) + + flow_property_bag: Dict[str, str] = {} + + allowed_flow_keys = [ + "_access_control", + "_concurrency", + "_default_view", + "catchup", + "fileloc", + "is_paused_upon_creation", + "start_date", + "tags", + "timezone", + ] + + for key in allowed_flow_keys: + if hasattr(dag, key): + flow_property_bag[key] = repr(getattr(dag, key)) + + data_flow.properties = flow_property_bag + base_url = conf.get("webserver", "base_url") + data_flow.url = f"{base_url}/tree?dag_id={dag.dag_id}" + + if capture_owner and dag.owner: + data_flow.owners.add(dag.owner) + + if capture_tags and dag.tags: + data_flow.tags.update(dag.tags) + + return data_flow + + @staticmethod + def _get_description(task: "Operator") -> Optional[str]: + from airflow.models.baseoperator import BaseOperator + + if not isinstance(task, BaseOperator): + # TODO: Get docs for mapped operators. 
+            return None
+
+        if hasattr(task, "doc") and task.doc:
+            return task.doc
+        elif hasattr(task, "doc_md") and task.doc_md:
+            return task.doc_md
+        elif hasattr(task, "doc_json") and task.doc_json:
+            return task.doc_json
+        elif hasattr(task, "doc_yaml") and task.doc_yaml:
+            return task.doc_yaml
+        elif hasattr(task, "doc_rst") and task.doc_rst:
+            return task.doc_rst
+        return None
+
+    @staticmethod
+    def generate_datajob(
+        cluster: str,
+        task: "Operator",
+        dag: "DAG",
+        set_dependencies: bool = True,
+        capture_owner: bool = True,
+        capture_tags: bool = True,
+    ) -> DataJob:
+        """
+
+        :param cluster: str
+        :param task: Operator
+        :param dag: DAG
+        :param set_dependencies: bool - whether to extract dependencies from airflow task
+        :param capture_owner: bool - whether to extract owner from airflow task
+        :param capture_tags: bool - whether to set tags automatically from airflow task
+        :return: DataJob - returns the generated DataJob object
+        """
+        dataflow_urn = DataFlowUrn.create_from_ids(
+            orchestrator="airflow", env=cluster, flow_id=dag.dag_id
+        )
+        datajob = DataJob(id=task.task_id, flow_urn=dataflow_urn)
+
+        # TODO add support for MappedOperator
+        datajob.description = AirflowGenerator._get_description(task)
+
+        job_property_bag: Dict[str, str] = {}
+
+        allowed_task_keys = [
+            "_downstream_task_ids",
+            "_inlets",
+            "_outlets",
+            "_task_type",
+            "_task_module",
+            "depends_on_past",
+            "email",
+            "label",
+            "execution_timeout",
+            "sla",
+            "sql",
+            "task_id",
+            "trigger_rule",
+            "wait_for_downstream",
+            # In Airflow 2.3, _downstream_task_ids was renamed to downstream_task_ids
+            "downstream_task_ids",
+            # In Airflow 2.4, _inlets and _outlets were removed in favor of non-private versions.
+            "inlets",
+            "outlets",
+        ]
+
+        for key in allowed_task_keys:
+            if hasattr(task, key):
+                job_property_bag[key] = repr(getattr(task, key))
+
+        datajob.properties = job_property_bag
+        base_url = conf.get("webserver", "base_url")
+        datajob.url = f"{base_url}/taskinstance/list/?flt1_dag_id_equals={datajob.flow_urn.get_flow_id()}&_flt_3_task_id={task.task_id}"
+
+        if capture_owner and dag.owner:
+            datajob.owners.add(dag.owner)
+
+        if capture_tags and dag.tags:
+            datajob.tags.update(dag.tags)
+
+        if set_dependencies:
+            datajob.upstream_urns.extend(
+                AirflowGenerator._get_dependencies(
+                    task=task, dag=dag, flow_urn=datajob.flow_urn
+                )
+            )
+
+        return datajob
+
+    @staticmethod
+    def create_datajob_instance(
+        cluster: str,
+        task: "Operator",
+        dag: "DAG",
+        data_job: Optional[DataJob] = None,
+    ) -> DataProcessInstance:
+        if data_job is None:
+            data_job = AirflowGenerator.generate_datajob(cluster, task=task, dag=dag)
+        dpi = DataProcessInstance.from_datajob(
+            datajob=data_job, id=task.task_id, clone_inlets=True, clone_outlets=True
+        )
+        return dpi
+
+    @staticmethod
+    def run_dataflow(
+        emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"],
+        cluster: str,
+        dag_run: "DagRun",
+        start_timestamp_millis: Optional[int] = None,
+        dataflow: Optional[DataFlow] = None,
+    ) -> None:
+        if dataflow is None:
+            assert dag_run.dag
+            dataflow = AirflowGenerator.generate_dataflow(cluster, dag_run.dag)
+
+        if start_timestamp_millis is None:
+            assert dag_run.execution_date
+            start_timestamp_millis = int(dag_run.execution_date.timestamp() * 1000)
+
+        assert dag_run.run_id
+        dpi = DataProcessInstance.from_dataflow(dataflow=dataflow, id=dag_run.run_id)
+
+        # This property only exists in Airflow2
+        if hasattr(dag_run, "run_type"):
+            from airflow.utils.types import DagRunType
+
+            if dag_run.run_type == DagRunType.SCHEDULED:
+ dpi.type = DataProcessTypeClass.BATCH_SCHEDULED + elif dag_run.run_type == DagRunType.MANUAL: + dpi.type = DataProcessTypeClass.BATCH_AD_HOC + else: + if dag_run.run_id.startswith("scheduled__"): + dpi.type = DataProcessTypeClass.BATCH_SCHEDULED + else: + dpi.type = DataProcessTypeClass.BATCH_AD_HOC + + property_bag: Dict[str, str] = {} + property_bag["run_id"] = str(dag_run.run_id) + property_bag["execution_date"] = str(dag_run.execution_date) + property_bag["end_date"] = str(dag_run.end_date) + property_bag["start_date"] = str(dag_run.start_date) + property_bag["creating_job_id"] = str(dag_run.creating_job_id) + # These properties only exists in Airflow>=2.2.0 + if hasattr(dag_run, "data_interval_start") and hasattr( + dag_run, "data_interval_end" + ): + property_bag["data_interval_start"] = str(dag_run.data_interval_start) + property_bag["data_interval_end"] = str(dag_run.data_interval_end) + property_bag["external_trigger"] = str(dag_run.external_trigger) + dpi.properties.update(property_bag) + + dpi.emit_process_start( + emitter=emitter, start_timestamp_millis=start_timestamp_millis + ) + + @staticmethod + def complete_dataflow( + emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"], + cluster: str, + dag_run: "DagRun", + end_timestamp_millis: Optional[int] = None, + dataflow: Optional[DataFlow] = None, + ) -> None: + """ + + :param emitter: DatahubRestEmitter - the datahub rest emitter to emit the generated mcps + :param cluster: str - name of the cluster + :param dag_run: DagRun + :param end_timestamp_millis: Optional[int] - the completion time in milliseconds if not set the current time will be used. + :param dataflow: Optional[Dataflow] + """ + if dataflow is None: + assert dag_run.dag + dataflow = AirflowGenerator.generate_dataflow(cluster, dag_run.dag) + + assert dag_run.run_id + dpi = DataProcessInstance.from_dataflow(dataflow=dataflow, id=dag_run.run_id) + if end_timestamp_millis is None: + if dag_run.end_date is None: + raise Exception( + f"Dag {dag_run.dag_id}_{dag_run.run_id} is still running and unable to get end_date..." 
+ ) + end_timestamp_millis = int(dag_run.end_date.timestamp() * 1000) + + # We should use DagRunState but it is not available in Airflow 1 + if dag_run.state == "success": + result = InstanceRunResult.SUCCESS + elif dag_run.state == "failed": + result = InstanceRunResult.FAILURE + else: + raise Exception( + f"Result should be either success or failure and it was {dag_run.state}" + ) + + dpi.emit_process_end( + emitter=emitter, + end_timestamp_millis=end_timestamp_millis, + result=result, + result_type="airflow", + ) + + @staticmethod + def run_datajob( + emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"], + cluster: str, + ti: "TaskInstance", + dag: "DAG", + dag_run: "DagRun", + start_timestamp_millis: Optional[int] = None, + datajob: Optional[DataJob] = None, + attempt: Optional[int] = None, + emit_templates: bool = True, + ) -> DataProcessInstance: + if datajob is None: + datajob = AirflowGenerator.generate_datajob(cluster, ti.task, dag) + + assert dag_run.run_id + dpi = DataProcessInstance.from_datajob( + datajob=datajob, + id=f"{dag.dag_id}_{ti.task_id}_{dag_run.run_id}", + clone_inlets=True, + clone_outlets=True, + ) + job_property_bag: Dict[str, str] = {} + job_property_bag["run_id"] = str(dag_run.run_id) + job_property_bag["duration"] = str(ti.duration) + job_property_bag["start_date"] = str(ti.start_date) + job_property_bag["end_date"] = str(ti.end_date) + job_property_bag["execution_date"] = str(ti.execution_date) + job_property_bag["try_number"] = str(ti.try_number - 1) + job_property_bag["hostname"] = str(ti.hostname) + job_property_bag["max_tries"] = str(ti.max_tries) + # Not compatible with Airflow 1 + if hasattr(ti, "external_executor_id"): + job_property_bag["external_executor_id"] = str(ti.external_executor_id) + job_property_bag["pid"] = str(ti.pid) + job_property_bag["state"] = str(ti.state) + job_property_bag["operator"] = str(ti.operator) + job_property_bag["priority_weight"] = str(ti.priority_weight) + job_property_bag["unixname"] = str(ti.unixname) + job_property_bag["log_url"] = ti.log_url + dpi.properties.update(job_property_bag) + dpi.url = ti.log_url + + # This property only exists in Airflow2 + if hasattr(ti, "dag_run") and hasattr(ti.dag_run, "run_type"): + from airflow.utils.types import DagRunType + + if ti.dag_run.run_type == DagRunType.SCHEDULED: + dpi.type = DataProcessTypeClass.BATCH_SCHEDULED + elif ti.dag_run.run_type == DagRunType.MANUAL: + dpi.type = DataProcessTypeClass.BATCH_AD_HOC + else: + if dag_run.run_id.startswith("scheduled__"): + dpi.type = DataProcessTypeClass.BATCH_SCHEDULED + else: + dpi.type = DataProcessTypeClass.BATCH_AD_HOC + + if start_timestamp_millis is None: + assert ti.start_date + start_timestamp_millis = int(ti.start_date.timestamp() * 1000) + + if attempt is None: + attempt = ti.try_number + + dpi.emit_process_start( + emitter=emitter, + start_timestamp_millis=start_timestamp_millis, + attempt=attempt, + emit_template=emit_templates, + ) + return dpi + + @staticmethod + def complete_datajob( + emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"], + cluster: str, + ti: "TaskInstance", + dag: "DAG", + dag_run: "DagRun", + end_timestamp_millis: Optional[int] = None, + result: Optional[InstanceRunResult] = None, + datajob: Optional[DataJob] = None, + ) -> DataProcessInstance: + """ + + :param emitter: DatahubRestEmitter + :param cluster: str + :param ti: TaskInstance + :param dag: DAG + :param dag_run: DagRun + :param end_timestamp_millis: Optional[int] + :param result: Optional[str] One of the result from 
datahub.metadata.schema_class.RunResultTypeClass + :param datajob: Optional[DataJob] + :return: DataProcessInstance + """ + if datajob is None: + datajob = AirflowGenerator.generate_datajob(cluster, ti.task, dag) + + if end_timestamp_millis is None: + assert ti.end_date + end_timestamp_millis = int(ti.end_date.timestamp() * 1000) + + if result is None: + # We should use TaskInstanceState but it is not available in Airflow 1 + if ti.state == "success": + result = InstanceRunResult.SUCCESS + elif ti.state == "failed": + result = InstanceRunResult.FAILURE + else: + raise Exception( + f"Result should be either success or failure and it was {ti.state}" + ) + + dpi = DataProcessInstance.from_datajob( + datajob=datajob, + id=f"{dag.dag_id}_{ti.task_id}_{dag_run.run_id}", + clone_inlets=True, + clone_outlets=True, + ) + dpi.emit_process_end( + emitter=emitter, + end_timestamp_millis=end_timestamp_millis, + result=result, + result_type="airflow", + ) + return dpi diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py index 226a7382f7595..d1cec9e5c1b54 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py @@ -1,4 +1,367 @@ -# This package serves as a shim, but the actual implementation lives in datahub_provider -# from the acryl-datahub package. We leave this shim here to avoid breaking existing -# Airflow installs. -from datahub_provider._plugin import DatahubPlugin # noqa: F401 +import contextlib +import logging +import traceback +from typing import Any, Callable, Iterable, List, Optional, Union + +from airflow.configuration import conf +from airflow.lineage import PIPELINE_OUTLETS +from airflow.models.baseoperator import BaseOperator +from airflow.plugins_manager import AirflowPlugin +from airflow.utils.module_loading import import_string +from cattr import structure +from datahub.api.entities.dataprocess.dataprocess_instance import InstanceRunResult + +from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED +from datahub_airflow_plugin._airflow_shims import MappedOperator, Operator +from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator +from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook +from datahub_airflow_plugin.lineage.datahub import DatahubLineageConfig + +assert AIRFLOW_PATCHED +logger = logging.getLogger(__name__) + +TASK_ON_FAILURE_CALLBACK = "on_failure_callback" +TASK_ON_SUCCESS_CALLBACK = "on_success_callback" + + +def get_lineage_config() -> DatahubLineageConfig: + """Load the lineage config from airflow.cfg.""" + + enabled = conf.get("datahub", "enabled", fallback=True) + datahub_conn_id = conf.get("datahub", "conn_id", fallback="datahub_rest_default") + cluster = conf.get("datahub", "cluster", fallback="prod") + graceful_exceptions = conf.get("datahub", "graceful_exceptions", fallback=True) + capture_tags_info = conf.get("datahub", "capture_tags_info", fallback=True) + capture_ownership_info = conf.get( + "datahub", "capture_ownership_info", fallback=True + ) + capture_executions = conf.get("datahub", "capture_executions", fallback=True) + return DatahubLineageConfig( + enabled=enabled, + datahub_conn_id=datahub_conn_id, + cluster=cluster, + graceful_exceptions=graceful_exceptions, + capture_ownership_info=capture_ownership_info, + 
capture_tags_info=capture_tags_info,
+        capture_executions=capture_executions,
+    )
+
+
+def _task_inlets(operator: "Operator") -> List:
+    # From Airflow 2.4, _inlets is dropped and inlets is used consistently.
+    # Earlier versions are not consistent, so we have to check _inlets first.
+    if hasattr(operator, "_inlets"):
+        return operator._inlets  # type: ignore[attr-defined, union-attr]
+    return operator.inlets
+
+
+def _task_outlets(operator: "Operator") -> List:
+    # From Airflow 2.4, _outlets is dropped and outlets is used consistently.
+    # We have to check _outlets first because outlets is empty in Airflow < 2.4.0.
+    if hasattr(operator, "_outlets"):
+        return operator._outlets  # type: ignore[attr-defined, union-attr]
+    return operator.outlets
+
+
+def get_inlets_from_task(task: BaseOperator, context: Any) -> Iterable[Any]:
+    # TODO: Fix for https://github.com/apache/airflow/commit/1b1f3fabc5909a447a6277cafef3a0d4ef1f01ae
+    # in Airflow 2.4.
+    # TODO: ignore/handle airflow's dataset type in our lineage
+
+    inlets: List[Any] = []
+    task_inlets = _task_inlets(task)
+    # From Airflow 2.3 this should be AbstractOperator, but we use BaseOperator for compatibility.
+    if isinstance(task_inlets, (str, BaseOperator)):
+        inlets = [
+            task_inlets,
+        ]
+
+    if task_inlets and isinstance(task_inlets, list):
+        inlets = []
+        task_ids = (
+            {o for o in task_inlets if isinstance(o, str)}
+            .union(op.task_id for op in task_inlets if isinstance(op, BaseOperator))
+            .intersection(task.get_flat_relative_ids(upstream=True))
+        )
+
+        from airflow.lineage import AUTO
+
+        # pick up unique direct upstream task_ids if AUTO is specified
+        if AUTO.upper() in task_inlets or AUTO.lower() in task_inlets:
+            print("Picking up unique direct upstream task_ids as AUTO is specified")
+            task_ids = task_ids.union(
+                task_ids.symmetric_difference(task.upstream_task_ids)
+            )
+
+        inlets = task.xcom_pull(
+            context, task_ids=list(task_ids), dag_id=task.dag_id, key=PIPELINE_OUTLETS
+        )
+
+        # re-instantiate the obtained inlets
+        inlets = [
+            structure(item["data"], import_string(item["type_name"]))
+            # _get_instance(structure(item, Metadata))
+            for sublist in inlets
+            if sublist
+            for item in sublist
+        ]
+
+        for inlet in task_inlets:
+            if not isinstance(inlet, str):
+                inlets.append(inlet)
+
+    return inlets
+
+
+def _make_emit_callback(
+    logger: logging.Logger,
+) -> Callable[[Optional[Exception], str], None]:
+    def emit_callback(err: Optional[Exception], msg: str) -> None:
+        if err:
+            logger.error(f"Error sending metadata to datahub: {msg}", exc_info=err)
+
+    return emit_callback
+
+
+def datahub_task_status_callback(context, status):
+    ti = context["ti"]
+    task: "BaseOperator" = ti.task
+    dag = context["dag"]
+
+    # This code is from the original airflow lineage code ->
+    # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py
+    inlets = get_inlets_from_task(task, context)
+
+    emitter = (
+        DatahubGenericHook(context["_datahub_config"].datahub_conn_id)
+        .get_underlying_hook()
+        .make_emitter()
+    )
+
+    dataflow = AirflowGenerator.generate_dataflow(
+        cluster=context["_datahub_config"].cluster,
+        dag=dag,
+        capture_tags=context["_datahub_config"].capture_tags_info,
+        capture_owner=context["_datahub_config"].capture_ownership_info,
+    )
+    task.log.info(f"Emitting Datahub Dataflow: {dataflow}")
+    dataflow.emit(emitter, callback=_make_emit_callback(task.log))
+
+    datajob = AirflowGenerator.generate_datajob(
+        cluster=context["_datahub_config"].cluster,
+        task=task,
+        dag=dag,
+ capture_tags=context["_datahub_config"].capture_tags_info, + capture_owner=context["_datahub_config"].capture_ownership_info, + ) + + for inlet in inlets: + datajob.inlets.append(inlet.urn) + + task_outlets = _task_outlets(task) + for outlet in task_outlets: + datajob.outlets.append(outlet.urn) + + task.log.info(f"Emitting Datahub Datajob: {datajob}") + datajob.emit(emitter, callback=_make_emit_callback(task.log)) + + if context["_datahub_config"].capture_executions: + dpi = AirflowGenerator.run_datajob( + emitter=emitter, + cluster=context["_datahub_config"].cluster, + ti=context["ti"], + dag=dag, + dag_run=context["dag_run"], + datajob=datajob, + start_timestamp_millis=int(ti.start_date.timestamp() * 1000), + ) + + task.log.info(f"Emitted Start Datahub Dataprocess Instance: {dpi}") + + dpi = AirflowGenerator.complete_datajob( + emitter=emitter, + cluster=context["_datahub_config"].cluster, + ti=context["ti"], + dag_run=context["dag_run"], + result=status, + dag=dag, + datajob=datajob, + end_timestamp_millis=int(ti.end_date.timestamp() * 1000), + ) + task.log.info(f"Emitted Completed Data Process Instance: {dpi}") + + emitter.flush() + + +def datahub_pre_execution(context): + ti = context["ti"] + task: "BaseOperator" = ti.task + dag = context["dag"] + + task.log.info("Running Datahub pre_execute method") + + emitter = ( + DatahubGenericHook(context["_datahub_config"].datahub_conn_id) + .get_underlying_hook() + .make_emitter() + ) + + # This code is from the original airflow lineage code -> + # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py + inlets = get_inlets_from_task(task, context) + + datajob = AirflowGenerator.generate_datajob( + cluster=context["_datahub_config"].cluster, + task=context["ti"].task, + dag=dag, + capture_tags=context["_datahub_config"].capture_tags_info, + capture_owner=context["_datahub_config"].capture_ownership_info, + ) + + for inlet in inlets: + datajob.inlets.append(inlet.urn) + + task_outlets = _task_outlets(task) + + for outlet in task_outlets: + datajob.outlets.append(outlet.urn) + + task.log.info(f"Emitting Datahub dataJob {datajob}") + datajob.emit(emitter, callback=_make_emit_callback(task.log)) + + if context["_datahub_config"].capture_executions: + dpi = AirflowGenerator.run_datajob( + emitter=emitter, + cluster=context["_datahub_config"].cluster, + ti=context["ti"], + dag=dag, + dag_run=context["dag_run"], + datajob=datajob, + start_timestamp_millis=int(ti.start_date.timestamp() * 1000), + ) + + task.log.info(f"Emitting Datahub Dataprocess Instance: {dpi}") + + emitter.flush() + + +def _wrap_pre_execution(pre_execution): + def custom_pre_execution(context): + config = get_lineage_config() + if config.enabled: + context["_datahub_config"] = config + datahub_pre_execution(context) + + # Call original policy + if pre_execution: + pre_execution(context) + + return custom_pre_execution + + +def _wrap_on_failure_callback(on_failure_callback): + def custom_on_failure_callback(context): + config = get_lineage_config() + if config.enabled: + context["_datahub_config"] = config + try: + datahub_task_status_callback(context, status=InstanceRunResult.FAILURE) + except Exception as e: + if not config.graceful_exceptions: + raise e + else: + print(f"Exception: {traceback.format_exc()}") + + # Call original policy + if on_failure_callback: + on_failure_callback(context) + + return custom_on_failure_callback + + +def _wrap_on_success_callback(on_success_callback): + def custom_on_success_callback(context): + config = get_lineage_config() + 
if config.enabled: + context["_datahub_config"] = config + try: + datahub_task_status_callback(context, status=InstanceRunResult.SUCCESS) + except Exception as e: + if not config.graceful_exceptions: + raise e + else: + print(f"Exception: {traceback.format_exc()}") + + # Call original policy + if on_success_callback: + on_success_callback(context) + + return custom_on_success_callback + + +def task_policy(task: Union[BaseOperator, MappedOperator]) -> None: + task.log.debug(f"Setting task policy for Dag: {task.dag_id} Task: {task.task_id}") + # task.add_inlets(["auto"]) + # task.pre_execute = _wrap_pre_execution(task.pre_execute) + + # MappedOperator's callbacks don't have setters until Airflow 2.X.X + # https://github.com/apache/airflow/issues/24547 + # We can bypass this by going through partial_kwargs for now + if MappedOperator and isinstance(task, MappedOperator): # type: ignore + on_failure_callback_prop: property = getattr( + MappedOperator, TASK_ON_FAILURE_CALLBACK + ) + on_success_callback_prop: property = getattr( + MappedOperator, TASK_ON_SUCCESS_CALLBACK + ) + if not on_failure_callback_prop.fset or not on_success_callback_prop.fset: + task.log.debug( + "Using MappedOperator's partial_kwargs instead of callback properties" + ) + task.partial_kwargs[TASK_ON_FAILURE_CALLBACK] = _wrap_on_failure_callback( + task.on_failure_callback + ) + task.partial_kwargs[TASK_ON_SUCCESS_CALLBACK] = _wrap_on_success_callback( + task.on_success_callback + ) + return + + task.on_failure_callback = _wrap_on_failure_callback(task.on_failure_callback) # type: ignore + task.on_success_callback = _wrap_on_success_callback(task.on_success_callback) # type: ignore + # task.pre_execute = _wrap_pre_execution(task.pre_execute) + + +def _wrap_task_policy(policy): + if policy and hasattr(policy, "_task_policy_patched_by"): + return policy + + def custom_task_policy(task): + policy(task) + task_policy(task) + + # Add a flag to the policy to indicate that we've patched it. 
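+    # (Without this marker, an already-wrapped policy could be wrapped again, and
+    # every task would then fire the DataHub callbacks more than once.)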
+ custom_task_policy._task_policy_patched_by = "datahub_plugin" # type: ignore[attr-defined] + return custom_task_policy + + +def _patch_policy(settings): + if hasattr(settings, "task_policy"): + datahub_task_policy = _wrap_task_policy(settings.task_policy) + settings.task_policy = datahub_task_policy + + +def _patch_datahub_policy(): + with contextlib.suppress(ImportError): + import airflow_local_settings + + _patch_policy(airflow_local_settings) + + from airflow.models.dagbag import settings + + _patch_policy(settings) + + +_patch_datahub_policy() + + +class DatahubPlugin(AirflowPlugin): + name = "datahub_plugin" diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py new file mode 100644 index 0000000000000..69f667cad3241 --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/entities.py @@ -0,0 +1,47 @@ +from abc import abstractmethod +from typing import Optional + +import attr +import datahub.emitter.mce_builder as builder +from datahub.utilities.urns.urn import guess_entity_type + + +class _Entity: + @property + @abstractmethod + def urn(self) -> str: + pass + + +@attr.s(auto_attribs=True, str=True) +class Dataset(_Entity): + platform: str + name: str + env: str = builder.DEFAULT_ENV + platform_instance: Optional[str] = None + + @property + def urn(self): + return builder.make_dataset_urn_with_platform_instance( + platform=self.platform, + name=self.name, + platform_instance=self.platform_instance, + env=self.env, + ) + + +@attr.s(str=True) +class Urn(_Entity): + _urn: str = attr.ib() + + @_urn.validator + def _validate_urn(self, attribute, value): + if not value.startswith("urn:"): + raise ValueError("invalid urn provided: urns must start with 'urn:'") + if guess_entity_type(value) != "dataset": + # This is because DataJobs only support Dataset lineage. 
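+        # For example, "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)"
+        # passes this check, while a chart or dashboard urn is rejected.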
+ raise ValueError("Airflow lineage currently only supports datasets") + + @property + def urn(self): + return self._urn diff --git a/metadata-ingestion/src/datahub_provider/example_dags/.airflowignore b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/.airflowignore similarity index 100% rename from metadata-ingestion/src/datahub_provider/example_dags/.airflowignore rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/.airflowignore diff --git a/.github/workflows/docker-ingestion-base.yml b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/__init__.py similarity index 100% rename from .github/workflows/docker-ingestion-base.yml rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/__init__.py diff --git a/metadata-ingestion/src/datahub_provider/example_dags/generic_recipe_sample_dag.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/generic_recipe_sample_dag.py similarity index 98% rename from metadata-ingestion/src/datahub_provider/example_dags/generic_recipe_sample_dag.py rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/generic_recipe_sample_dag.py index d0e4aa944e840..ff8dba457066f 100644 --- a/metadata-ingestion/src/datahub_provider/example_dags/generic_recipe_sample_dag.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/generic_recipe_sample_dag.py @@ -9,7 +9,6 @@ from airflow import DAG from airflow.operators.python import PythonOperator from airflow.utils.dates import days_ago - from datahub.configuration.config_loader import load_config_file from datahub.ingestion.run.pipeline import Pipeline @@ -41,6 +40,7 @@ def datahub_recipe(): schedule_interval=timedelta(days=1), start_date=days_ago(2), catchup=False, + default_view="tree", ) as dag: ingest_task = PythonOperator( task_id="ingest_using_recipe", diff --git a/metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_demo.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py similarity index 94% rename from metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_demo.py rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py index 95b594e4052a5..3caea093b932d 100644 --- a/metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_demo.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_demo.py @@ -9,7 +9,7 @@ from airflow.operators.bash import BashOperator from airflow.utils.dates import days_ago -from datahub_provider.entities import Dataset, Urn +from datahub_airflow_plugin.entities import Dataset, Urn default_args = { "owner": "airflow", @@ -28,6 +28,7 @@ start_date=days_ago(2), tags=["example_tag"], catchup=False, + default_view="tree", ) as dag: task1 = BashOperator( task_id="run_data_task", diff --git a/metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_taskflow_demo.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py similarity index 94% rename from metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_taskflow_demo.py rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py index 
1fe321eb5c80a..ceb0f452b540a 100644 --- a/metadata-ingestion/src/datahub_provider/example_dags/lineage_backend_taskflow_demo.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_backend_taskflow_demo.py @@ -8,7 +8,7 @@ from airflow.decorators import dag, task from airflow.utils.dates import days_ago -from datahub_provider.entities import Dataset, Urn +from datahub_airflow_plugin.entities import Dataset, Urn default_args = { "owner": "airflow", @@ -26,6 +26,7 @@ start_date=days_ago(2), tags=["example_tag"], catchup=False, + default_view="tree", ) def datahub_lineage_backend_taskflow_demo(): @task( diff --git a/metadata-ingestion/src/datahub_provider/example_dags/lineage_emission_dag.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_emission_dag.py similarity index 96% rename from metadata-ingestion/src/datahub_provider/example_dags/lineage_emission_dag.py rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_emission_dag.py index 153464246cef7..f40295c6bb883 100644 --- a/metadata-ingestion/src/datahub_provider/example_dags/lineage_emission_dag.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/lineage_emission_dag.py @@ -5,12 +5,12 @@ from datetime import timedelta +import datahub.emitter.mce_builder as builder from airflow import DAG from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator from airflow.utils.dates import days_ago -import datahub.emitter.mce_builder as builder -from datahub_provider.operators.datahub import DatahubEmitterOperator +from datahub_airflow_plugin.operators.datahub import DatahubEmitterOperator default_args = { "owner": "airflow", @@ -31,6 +31,7 @@ schedule_interval=timedelta(days=1), start_date=days_ago(2), catchup=False, + default_view="tree", ) as dag: # This example shows a SnowflakeOperator followed by a lineage emission. However, the # same DatahubEmitterOperator can be used to emit lineage in any context. 
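A condensed sketch of the entity-based lineage pattern used by the renamed lineage_backend_demo.py above (the platform and table names are placeholders):

    from airflow.operators.bash import BashOperator

    from datahub_airflow_plugin.entities import Dataset, Urn

    task1 = BashOperator(
        task_id="run_data_task",
        bash_command="echo 'This is where you might run your data tooling.'",
        inlets=[Dataset("snowflake", "mydb.schema.tableA")],
        outlets=[
            Urn(
                "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,PROD)"
            )
        ],
    )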
diff --git a/metadata-ingestion/src/datahub_provider/example_dags/mysql_sample_dag.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/mysql_sample_dag.py similarity index 98% rename from metadata-ingestion/src/datahub_provider/example_dags/mysql_sample_dag.py rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/mysql_sample_dag.py index 2c833e1425634..77b29711d7688 100644 --- a/metadata-ingestion/src/datahub_provider/example_dags/mysql_sample_dag.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/mysql_sample_dag.py @@ -47,6 +47,7 @@ def ingest_from_mysql(): start_date=datetime(2022, 1, 1), schedule_interval=timedelta(days=1), catchup=False, + default_view="tree", ) as dag: # While it is also possible to use the PythonOperator, we recommend using # the PythonVirtualenvOperator to ensure that there are no dependency diff --git a/metadata-ingestion/src/datahub_provider/example_dags/snowflake_sample_dag.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/snowflake_sample_dag.py similarity index 99% rename from metadata-ingestion/src/datahub_provider/example_dags/snowflake_sample_dag.py rename to metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/snowflake_sample_dag.py index c107bb479262c..30e63b68e459f 100644 --- a/metadata-ingestion/src/datahub_provider/example_dags/snowflake_sample_dag.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/snowflake_sample_dag.py @@ -57,6 +57,7 @@ def ingest_from_snowflake(snowflake_credentials, datahub_gms_server): start_date=datetime(2022, 1, 1), schedule_interval=timedelta(days=1), catchup=False, + default_view="tree", ) as dag: # This example pulls credentials from Airflow's connection store. # For this to work, you must have previously configured these connections in Airflow. diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/__init__.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py new file mode 100644 index 0000000000000..aed858c6c4df0 --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py @@ -0,0 +1,214 @@ +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union + +from airflow.exceptions import AirflowException +from airflow.hooks.base import BaseHook +from datahub.metadata.com.linkedin.pegasus2avro.mxe import ( + MetadataChangeEvent, + MetadataChangeProposal, +) + +if TYPE_CHECKING: + from airflow.models.connection import Connection + from datahub.emitter.kafka_emitter import DatahubKafkaEmitter + from datahub.emitter.rest_emitter import DatahubRestEmitter + from datahub.ingestion.sink.datahub_kafka import KafkaSinkConfig + + +class DatahubRestHook(BaseHook): + """ + Creates a DataHub Rest API connection used to send metadata to DataHub. + Takes the endpoint for your DataHub Rest API in the Server Endpoint(host) field. + + URI example: :: + + AIRFLOW_CONN_DATAHUB_REST_DEFAULT='datahub-rest://rest-endpoint' + + :param datahub_rest_conn_id: Reference to the DataHub Rest connection. 
+ :type datahub_rest_conn_id: str + """ + + conn_name_attr = "datahub_rest_conn_id" + default_conn_name = "datahub_rest_default" + conn_type = "datahub_rest" + hook_name = "DataHub REST Server" + + def __init__(self, datahub_rest_conn_id: str = default_conn_name) -> None: + super().__init__() + self.datahub_rest_conn_id = datahub_rest_conn_id + + @staticmethod + def get_connection_form_widgets() -> Dict[str, Any]: + return {} + + @staticmethod + def get_ui_field_behaviour() -> Dict: + """Returns custom field behavior""" + return { + "hidden_fields": ["port", "schema", "login"], + "relabeling": { + "host": "Server Endpoint", + }, + } + + def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: + conn: "Connection" = self.get_connection(self.datahub_rest_conn_id) + + host = conn.host + if not host: + raise AirflowException("host parameter is required") + if conn.port: + if ":" in host: + raise AirflowException( + "host parameter should not contain a port number if the port is specified separately" + ) + host = f"{host}:{conn.port}" + password = conn.password + timeout_sec = conn.extra_dejson.get("timeout_sec") + return (host, password, timeout_sec) + + def make_emitter(self) -> "DatahubRestEmitter": + import datahub.emitter.rest_emitter + + return datahub.emitter.rest_emitter.DatahubRestEmitter(*self._get_config()) + + def emit_mces(self, mces: List[MetadataChangeEvent]) -> None: + emitter = self.make_emitter() + + for mce in mces: + emitter.emit_mce(mce) + + def emit_mcps(self, mcps: List[MetadataChangeProposal]) -> None: + emitter = self.make_emitter() + + for mce in mcps: + emitter.emit_mcp(mce) + + +class DatahubKafkaHook(BaseHook): + """ + Creates a DataHub Kafka connection used to send metadata to DataHub. + Takes your kafka broker in the Kafka Broker(host) field. + + URI example: :: + + AIRFLOW_CONN_DATAHUB_KAFKA_DEFAULT='datahub-kafka://kafka-broker' + + :param datahub_kafka_conn_id: Reference to the DataHub Kafka connection. 
+ :type datahub_kafka_conn_id: str + """ + + conn_name_attr = "datahub_kafka_conn_id" + default_conn_name = "datahub_kafka_default" + conn_type = "datahub_kafka" + hook_name = "DataHub Kafka Sink" + + def __init__(self, datahub_kafka_conn_id: str = default_conn_name) -> None: + super().__init__() + self.datahub_kafka_conn_id = datahub_kafka_conn_id + + @staticmethod + def get_connection_form_widgets() -> Dict[str, Any]: + return {} + + @staticmethod + def get_ui_field_behaviour() -> Dict: + """Returns custom field behavior""" + return { + "hidden_fields": ["port", "schema", "login", "password"], + "relabeling": { + "host": "Kafka Broker", + }, + } + + def _get_config(self) -> "KafkaSinkConfig": + import datahub.ingestion.sink.datahub_kafka + + conn = self.get_connection(self.datahub_kafka_conn_id) + obj = conn.extra_dejson + obj.setdefault("connection", {}) + if conn.host is not None: + if "bootstrap" in obj["connection"]: + raise AirflowException( + "Kafka broker specified twice (present in host and extra)" + ) + obj["connection"]["bootstrap"] = ":".join( + map(str, filter(None, [conn.host, conn.port])) + ) + config = datahub.ingestion.sink.datahub_kafka.KafkaSinkConfig.parse_obj(obj) + return config + + def make_emitter(self) -> "DatahubKafkaEmitter": + import datahub.emitter.kafka_emitter + + sink_config = self._get_config() + return datahub.emitter.kafka_emitter.DatahubKafkaEmitter(sink_config) + + def emit_mces(self, mces: List[MetadataChangeEvent]) -> None: + emitter = self.make_emitter() + errors = [] + + def callback(exc, msg): + if exc: + errors.append(exc) + + for mce in mces: + emitter.emit_mce_async(mce, callback) + + emitter.flush() + + if errors: + raise AirflowException(f"failed to push some MCEs: {errors}") + + def emit_mcps(self, mcps: List[MetadataChangeProposal]) -> None: + emitter = self.make_emitter() + errors = [] + + def callback(exc, msg): + if exc: + errors.append(exc) + + for mcp in mcps: + emitter.emit_mcp_async(mcp, callback) + + emitter.flush() + + if errors: + raise AirflowException(f"failed to push some MCPs: {errors}") + + +class DatahubGenericHook(BaseHook): + """ + Emits Metadata Change Events using either the DatahubRestHook or the + DatahubKafkaHook. Set up a DataHub Rest or Kafka connection to use. + + :param datahub_conn_id: Reference to the DataHub connection. + :type datahub_conn_id: str + """ + + def __init__(self, datahub_conn_id: str) -> None: + super().__init__() + self.datahub_conn_id = datahub_conn_id + + def get_underlying_hook(self) -> Union[DatahubRestHook, DatahubKafkaHook]: + conn = self.get_connection(self.datahub_conn_id) + + # We need to figure out the underlying hook type. First check the + # conn_type. If that fails, attempt to guess using the conn id name. 
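+        # For example, a connection whose id is "datahub_rest_default" resolves to
+        # DatahubRestHook below even when its conn_type is left unset.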
+ if conn.conn_type == DatahubRestHook.conn_type: + return DatahubRestHook(self.datahub_conn_id) + elif conn.conn_type == DatahubKafkaHook.conn_type: + return DatahubKafkaHook(self.datahub_conn_id) + elif "rest" in self.datahub_conn_id: + return DatahubRestHook(self.datahub_conn_id) + elif "kafka" in self.datahub_conn_id: + return DatahubKafkaHook(self.datahub_conn_id) + else: + raise AirflowException( + f"DataHub cannot handle conn_type {conn.conn_type} in {conn}" + ) + + def make_emitter(self) -> Union["DatahubRestEmitter", "DatahubKafkaEmitter"]: + return self.get_underlying_hook().make_emitter() + + def emit_mces(self, mces: List[MetadataChangeEvent]) -> None: + return self.get_underlying_hook().emit_mces(mces) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/__init__.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py new file mode 100644 index 0000000000000..c41bb2b2a1e37 --- /dev/null +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/lineage/datahub.py @@ -0,0 +1,91 @@ +import json +from typing import TYPE_CHECKING, Dict, List, Optional + +from airflow.configuration import conf +from airflow.lineage.backend import LineageBackend + +from datahub_airflow_plugin._lineage_core import ( + DatahubBasicLineageConfig, + send_lineage_to_datahub, +) + +if TYPE_CHECKING: + from airflow.models.baseoperator import BaseOperator + + +class DatahubLineageConfig(DatahubBasicLineageConfig): + # If set to true, most runtime errors in the lineage backend will be + # suppressed and will not cause the overall task to fail. Note that + # configuration issues will still throw exceptions. + graceful_exceptions: bool = True + + +def get_lineage_config() -> DatahubLineageConfig: + """Load the lineage config from airflow.cfg.""" + + # The kwargs pattern is also used for secret backends. + kwargs_str = conf.get("lineage", "datahub_kwargs", fallback="{}") + kwargs = json.loads(kwargs_str) + + # Continue to support top-level datahub_conn_id config. + datahub_conn_id = conf.get("lineage", "datahub_conn_id", fallback=None) + if datahub_conn_id: + kwargs["datahub_conn_id"] = datahub_conn_id + + return DatahubLineageConfig.parse_obj(kwargs) + + +class DatahubLineageBackend(LineageBackend): + """ + Sends lineage data from tasks to DataHub. + + Configurable via ``airflow.cfg`` as follows: :: + + # For REST-based: + airflow connections add --conn-type 'datahub_rest' 'datahub_rest_default' --conn-host 'http://localhost:8080' + # For Kafka-based (standard Kafka sink config can be passed via extras): + airflow connections add --conn-type 'datahub_kafka' 'datahub_kafka_default' --conn-host 'broker:9092' --conn-extra '{}' + + [lineage] + backend = datahub_provider.lineage.datahub.DatahubLineageBackend + datahub_kwargs = { + "datahub_conn_id": "datahub_rest_default", + "capture_ownership_info": true, + "capture_tags_info": true, + "graceful_exceptions": true } + # The above indentation is important! + """ + + def __init__(self) -> None: + super().__init__() + + # By attempting to get and parse the config, we can detect configuration errors + # ahead of time. The init method is only called in Airflow 2.x. 
+        _ = get_lineage_config()
+
+    # With Airflow 2.0, this can be an instance method. However, with Airflow 1.10.x, this
+    # method is used statically, even though LineageBackend declares it as an instance method.
+    @staticmethod
+    def send_lineage(
+        operator: "BaseOperator",
+        inlets: Optional[List] = None,  # unused
+        outlets: Optional[List] = None,  # unused
+        context: Optional[Dict] = None,
+    ) -> None:
+        config = get_lineage_config()
+        if not config.enabled:
+            return
+
+        try:
+            context = context or {}  # ensure not None to satisfy mypy
+            send_lineage_to_datahub(
+                config, operator, operator.inlets, operator.outlets, context
+            )
+        except Exception as e:
+            if config.graceful_exceptions:
+                operator.log.error(e)
+                operator.log.info(
+                    "Suppressing error because graceful_exceptions is set"
+                )
+            else:
+                raise
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/__init__.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub.py
new file mode 100644
index 0000000000000..109e7ddfe4dfa
--- /dev/null
+++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub.py
@@ -0,0 +1,63 @@
+from typing import List, Union
+
+from airflow.models import BaseOperator
+from airflow.utils.decorators import apply_defaults
+from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
+
+from datahub_airflow_plugin.hooks.datahub import (
+    DatahubGenericHook,
+    DatahubKafkaHook,
+    DatahubRestHook,
+)
+
+
+class DatahubBaseOperator(BaseOperator):
+    """
+    The DatahubBaseOperator is used as a base operator for all DataHub operators.
+    """
+
+    ui_color = "#4398c8"
+
+    hook: Union[DatahubRestHook, DatahubKafkaHook]
+
+    # mypy is not a fan of this. Newer versions of Airflow support proper typing for the decorator
+    # using PEP 612. However, there is not yet a good way to inherit the types of the kwargs from
+    # the superclass.
+    @apply_defaults  # type: ignore[misc]
+    def __init__(  # type: ignore[no-untyped-def]
+        self,
+        *,
+        datahub_conn_id: str,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.datahub_conn_id = datahub_conn_id
+        self.generic_hook = DatahubGenericHook(datahub_conn_id)
+
+
+class DatahubEmitterOperator(DatahubBaseOperator):
+    """
+    Emits a Metadata Change Event to DataHub using either a DataHub
+    Rest or Kafka connection.
+
+    :param datahub_conn_id: Reference to the DataHub Rest or Kafka Connection.
+    :type datahub_conn_id: str
+    """
+
+    # See above for why these mypy type issues are ignored here.
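+    # A minimal usage sketch (urns and connection id are placeholders), in the
+    # spirit of lineage_emission_dag.py:
+    #
+    #   emit = DatahubEmitterOperator(
+    #       task_id="emit_lineage",
+    #       datahub_conn_id="datahub_rest_default",
+    #       mces=[
+    #           builder.make_lineage_mce(
+    #               upstream_urns=[builder.make_dataset_urn("snowflake", "mydb.schema.tableA")],
+    #               downstream_urn=builder.make_dataset_urn("snowflake", "mydb.schema.tableB"),
+    #           )
+    #       ],
+    #   )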
+    @apply_defaults  # type: ignore[misc]
+    def __init__(  # type: ignore[no-untyped-def]
+        self,
+        mces: List[MetadataChangeEvent],
+        datahub_conn_id: str,
+        **kwargs,
+    ):
+        super().__init__(
+            datahub_conn_id=datahub_conn_id,
+            **kwargs,
+        )
+        self.mces = mces
+
+    def execute(self, context):
+        self.generic_hook.get_underlying_hook().emit_mces(self.mces)
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_operator.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_operator.py
new file mode 100644
index 0000000000000..6f93c09a9e287
--- /dev/null
+++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_operator.py
@@ -0,0 +1,78 @@
+import datetime
+from typing import Any, List, Optional, Sequence, Union
+
+from airflow.models import BaseOperator
+from datahub.api.circuit_breaker import (
+    AssertionCircuitBreaker,
+    AssertionCircuitBreakerConfig,
+)
+
+from datahub_airflow_plugin.hooks.datahub import DatahubRestHook
+
+
+class DataHubAssertionOperator(BaseOperator):
+    r"""
+    DataHub Assertion Circuit Breaker Operator.
+
+    :param urn: The DataHub dataset unique identifier. (templated)
+    :param datahub_rest_conn_id: The REST DataHub connection id to communicate with DataHub
+        which is set as Airflow connection.
+    :param check_last_assertion_time: If set, it checks assertions after the last operation was set on the dataset.
+        By default it is True.
+    :param time_delta: If check_last_assertion_time is False, it checks for assertions within the time delta.
+    """
+
+    template_fields: Sequence[str] = ("urn",)
+    circuit_breaker: AssertionCircuitBreaker
+    urn: Union[List[str], str]
+
+    def __init__(  # type: ignore[no-untyped-def]
+        self,
+        *,
+        urn: Union[List[str], str],
+        datahub_rest_conn_id: Optional[str] = None,
+        check_last_assertion_time: bool = True,
+        time_delta: Optional[datetime.timedelta] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        hook: DatahubRestHook
+        if datahub_rest_conn_id is not None:
+            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
+        else:
+            hook = DatahubRestHook()
+
+        host, password, timeout_sec = hook._get_config()
+        self.urn = urn
+        config: AssertionCircuitBreakerConfig = AssertionCircuitBreakerConfig(
+            datahub_host=host,
+            datahub_token=password,
+            timeout=timeout_sec,
+            verify_after_last_update=check_last_assertion_time,
+            time_delta=time_delta if time_delta else datetime.timedelta(days=1),
+        )
+
+        self.circuit_breaker = AssertionCircuitBreaker(config=config)
+
+    def execute(self, context: Any) -> bool:
+        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
+            self.log.info(
+                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
+            )
+            return True
+
+        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
+        if isinstance(self.urn, str):
+            urns = [self.urn]
+        elif isinstance(self.urn, list):
+            urns = self.urn
+        else:
+            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
+
+        for urn in urns:
+            self.log.info(f"Checking if dataset {urn} is ready to be consumed")
+            ret = self.circuit_breaker.is_circuit_breaker_active(urn=urn)
+            if ret:
+                raise Exception(f"Dataset {urn} is not in consumable state")
+
+        return True
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_sensor.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_sensor.py
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_sensor.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_sensor.py
new file mode 100644
index 0000000000000..16e5d1cbe8b1f
--- /dev/null
+++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_assertion_sensor.py
@@ -0,0 +1,78 @@
+import datetime
+from typing import Any, List, Optional, Sequence, Union
+
+from airflow.sensors.base import BaseSensorOperator
+from datahub.api.circuit_breaker import (
+    AssertionCircuitBreaker,
+    AssertionCircuitBreakerConfig,
+)
+
+from datahub_airflow_plugin.hooks.datahub import DatahubRestHook
+
+
+class DataHubAssertionSensor(BaseSensorOperator):
+    r"""
+    DataHub Assertion Circuit Breaker Sensor.
+
+    :param urn: The DataHub dataset unique identifier. (templated)
+    :param datahub_rest_conn_id: The DataHub REST connection id, set up as an Airflow
+        connection, used to communicate with DataHub.
+    :param check_last_assertion_time: If True (the default), only assertions run after
+        the last operation on the dataset are checked.
+    :param time_delta: If verify_after_last_update is False, only assertions within this
+        time delta are checked.
+    """
+
+    template_fields: Sequence[str] = ("urn",)
+    circuit_breaker: AssertionCircuitBreaker
+    urn: Union[List[str], str]
+
+    def __init__(  # type: ignore[no-untyped-def]
+        self,
+        *,
+        urn: Union[List[str], str],
+        datahub_rest_conn_id: Optional[str] = None,
+        check_last_assertion_time: bool = True,
+        time_delta: datetime.timedelta = datetime.timedelta(days=1),
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        hook: DatahubRestHook
+        if datahub_rest_conn_id is not None:
+            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
+        else:
+            hook = DatahubRestHook()
+
+        host, password, timeout_sec = hook._get_config()
+        self.urn = urn
+        config: AssertionCircuitBreakerConfig = AssertionCircuitBreakerConfig(
+            datahub_host=host,
+            datahub_token=password,
+            timeout=timeout_sec,
+            verify_after_last_update=check_last_assertion_time,
+            time_delta=time_delta,
+        )
+        self.circuit_breaker = AssertionCircuitBreaker(config=config)
+
+    def poke(self, context: Any) -> bool:
+        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
+            self.log.info(
+                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
+            )
+            return True
+
+        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
+        if isinstance(self.urn, str):
+            urns = [self.urn]
+        elif isinstance(self.urn, list):
+            urns = self.urn
+        else:
+            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
+
+        for urn in urns:
+            self.log.info(f"Checking if dataset {urn} is ready to be consumed")
+            ret = self.circuit_breaker.is_circuit_breaker_active(urn=urn)
+            if ret:
+                self.log.info(f"Dataset {urn} is not in consumable state")
+                return False
+
+        return True
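Note that all of the circuit breaker operators and sensors in this patch only test for the presence of the `datahub_silence_circuit_breakers` key in the triggering DagRun's conf, so any value disables the checks for that run. Assuming a DAG id of `my_dag` (illustrative), a manual trigger that silences them might look like:

```shell
airflow dags trigger my_dag --conf '{"datahub_silence_circuit_breakers": true}'
```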
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_operator.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_operator.py
new file mode 100644
index 0000000000000..94e105309537b
--- /dev/null
+++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_operator.py
@@ -0,0 +1,97 @@
+import datetime
+from typing import Any, List, Optional, Sequence, Union
+
+from airflow.sensors.base import BaseSensorOperator
+from datahub.api.circuit_breaker import (
+    OperationCircuitBreaker,
+    OperationCircuitBreakerConfig,
+)
+
+from datahub_airflow_plugin.hooks.datahub import DatahubRestHook
+
+
+class DataHubOperationCircuitBreakerOperator(BaseSensorOperator):
+    r"""
+    DataHub Operation Circuit Breaker Operator.
+
+    :param urn: The DataHub dataset unique identifier. (templated)
+    :param datahub_rest_conn_id: The DataHub REST connection id, set up as an Airflow
+        connection, used to communicate with DataHub.
+    :param partition: The partition to check the operation against.
+    :param source_type: The source type to filter on. If not set, it will accept any
+        source type. See valid values at: https://datahubproject.io/docs/graphql/enums#operationsourcetype
+    :param operation_type: The operation type to filter on. If not set, it will accept any
+        operation type. See valid values at: https://datahubproject.io/docs/graphql/enums/#operationtype
+    """
+
+    template_fields: Sequence[str] = (
+        "urn",
+        "partition",
+        "source_type",
+        "operation_type",
+    )
+    circuit_breaker: OperationCircuitBreaker
+    urn: Union[List[str], str]
+    partition: Optional[str]
+    source_type: Optional[str]
+    operation_type: Optional[str]
+
+    def __init__(  # type: ignore[no-untyped-def]
+        self,
+        *,
+        urn: Union[List[str], str],
+        datahub_rest_conn_id: Optional[str] = None,
+        time_delta: Optional[datetime.timedelta] = datetime.timedelta(days=1),
+        partition: Optional[str] = None,
+        source_type: Optional[str] = None,
+        operation_type: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        hook: DatahubRestHook
+        if datahub_rest_conn_id is not None:
+            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
+        else:
+            hook = DatahubRestHook()
+
+        host, password, timeout_sec = hook._get_config()
+
+        self.urn = urn
+        self.partition = partition
+        self.operation_type = operation_type
+        self.source_type = source_type
+
+        config: OperationCircuitBreakerConfig = OperationCircuitBreakerConfig(
+            datahub_host=host,
+            datahub_token=password,
+            timeout=timeout_sec,
+            time_delta=time_delta,
+        )
+
+        self.circuit_breaker = OperationCircuitBreaker(config=config)
+
+    def execute(self, context: Any) -> bool:
+        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
+            self.log.info(
+                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
+            )
+            return True
+
+        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
+        if isinstance(self.urn, str):
+            urns = [self.urn]
+        elif isinstance(self.urn, list):
+            urns = self.urn
+        else:
+            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
+
+        for urn in urns:
+            self.log.info(f"Checking if dataset {urn} is ready to be consumed")
+            ret = self.circuit_breaker.is_circuit_breaker_active(
+                urn=urn,
+                partition=self.partition,
+                operation_type=self.operation_type,
+                source_type=self.source_type,
+            )
+            if ret:
+                raise Exception(f"Dataset {urn} is not in consumable state")
+
+        return True
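A hypothetical task using this operator to gate on a recent load might look like the following sketch; the urn, partition, and filter values are illustrative, and valid filter values come from the GraphQL enums linked in the docstring:

```python
from datetime import timedelta

from datahub_airflow_plugin.operators.datahub_operation_operator import (
    DataHubOperationCircuitBreakerOperator,
)

# Inside a `with DAG(...):` block. Fails the task unless a matching operation
# was reported on the dataset within the last day.
check_last_load = DataHubOperationCircuitBreakerOperator(
    task_id="check_last_load",
    urn="urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
    datahub_rest_conn_id="datahub_rest_default",
    partition="20230828",
    source_type="DATA_PROCESS",
    operation_type="INSERT",
    time_delta=timedelta(days=1),
)
```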
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_sensor.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_sensor.py
new file mode 100644
index 0000000000000..434c60754064d
--- /dev/null
+++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/operators/datahub_operation_sensor.py
@@ -0,0 +1,100 @@
+import datetime
+from typing import Any, List, Optional, Sequence, Union
+
+from airflow.sensors.base import BaseSensorOperator
+from datahub.api.circuit_breaker import (
+    OperationCircuitBreaker,
+    OperationCircuitBreakerConfig,
+)
+
+from datahub_airflow_plugin.hooks.datahub import DatahubRestHook
+
+
+class DataHubOperationCircuitBreakerSensor(BaseSensorOperator):
+    r"""
+    DataHub Operation Circuit Breaker Sensor.
+
+    :param urn: The DataHub dataset unique identifier. (templated)
+    :param datahub_rest_conn_id: The DataHub REST connection id, set up as an Airflow
+        connection, used to communicate with DataHub.
+    :param partition: The partition to check the operation against.
+    :param source_type: The source type to filter on. If not set, it will accept any
+        source type. See valid values at: https://datahubproject.io/docs/graphql/enums#operationsourcetype
+    :param operation_type: The operation type to filter on. If not set, it will accept any
+        operation type. See valid values at: https://datahubproject.io/docs/graphql/enums/#operationtype
+    """
+
+    template_fields: Sequence[str] = (
+        "urn",
+        "partition",
+        "source_type",
+        "operation_type",
+    )
+    circuit_breaker: OperationCircuitBreaker
+    urn: Union[List[str], str]
+    partition: Optional[str]
+    source_type: Optional[str]
+    operation_type: Optional[str]
+
+    def __init__(  # type: ignore[no-untyped-def]
+        self,
+        *,
+        urn: Union[List[str], str],
+        datahub_rest_conn_id: Optional[str] = None,
+        time_delta: Optional[datetime.timedelta] = datetime.timedelta(days=1),
+        partition: Optional[str] = None,
+        source_type: Optional[str] = None,
+        operation_type: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        hook: DatahubRestHook
+        if datahub_rest_conn_id is not None:
+            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
+        else:
+            hook = DatahubRestHook()
+
+        host, password, timeout_sec = hook._get_config()
+
+        self.urn = urn
+        self.partition = partition
+        self.operation_type = operation_type
+        self.source_type = source_type
+
+        config: OperationCircuitBreakerConfig = OperationCircuitBreakerConfig(
+            datahub_host=host,
+            datahub_token=password,
+            timeout=timeout_sec,
+            time_delta=time_delta,
+        )
+
+        self.circuit_breaker = OperationCircuitBreaker(config=config)
+
+    def poke(self, context: Any) -> bool:
+        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
+            self.log.info(
+                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
+            )
+            return True
+
+        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
+        if isinstance(self.urn, str):
+            urns = [self.urn]
+        elif isinstance(self.urn, list):
+            urns = self.urn
+        else:
+            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
+
+        for urn in urns:
+            self.log.info(f"Checking if dataset {urn} is ready to be consumed")
+            ret = self.circuit_breaker.is_circuit_breaker_active(
+                urn=urn,
+                partition=self.partition,
+                operation_type=self.operation_type,
+                source_type=self.source_type,
+            )
+            if ret:
+                self.log.info(f"Dataset {urn} is not in consumable state")
+                return False
+
+        return True
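Where the operator above raises immediately on a non-consumable dataset, this sensor's poke returns False so that Airflow keeps rescheduling the check. A sketch relying only on standard BaseSensorOperator parameters (values illustrative):

```python
from datahub_airflow_plugin.operators.datahub_operation_sensor import (
    DataHubOperationCircuitBreakerSensor,
)

# Inside a `with DAG(...):` block. Re-checks the dataset every 5 minutes and
# gives up after 6 hours of waiting.
wait_for_operation = DataHubOperationCircuitBreakerSensor(
    task_id="wait_for_operation",
    urn="urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
    poke_interval=300,  # seconds between checks
    timeout=6 * 60 * 60,  # fail the task after this many seconds
)
```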
diff --git a/metadata-ingestion/tests/unit/test_airflow.py b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py
similarity index 97%
rename from metadata-ingestion/tests/unit/test_airflow.py
rename to metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py
index 980dc5550fafa..9aa901171cfa6 100644
--- a/metadata-ingestion/tests/unit/test_airflow.py
+++ b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py
@@ -9,12 +9,11 @@
 import airflow.configuration
 import airflow.version
+import datahub.emitter.mce_builder as builder
 import packaging.version
 import pytest
 from airflow.lineage import apply_lineage, prepare_lineage
 from airflow.models import DAG, Connection, DagBag, DagRun, TaskInstance
-
-import datahub.emitter.mce_builder as builder
 from datahub_provider import get_provider_info
 from datahub_provider._airflow_shims import AIRFLOW_PATCHED, EmptyOperator
 from datahub_provider.entities import Dataset, Urn
 
@@ -23,7 +22,7 @@
 
 assert AIRFLOW_PATCHED
 
-pytestmark = pytest.mark.airflow
+# TODO: Remove the default_view="tree" arg. Figure out why default_view is being picked up as "grid" and how to fix it.
 
 # Approach suggested by https://stackoverflow.com/a/11887885/5004662.
 AIRFLOW_VERSION = packaging.version.parse(airflow.version.version)
@@ -75,7 +74,7 @@ def test_airflow_provider_info():
 @pytest.mark.filterwarnings("ignore:.*is deprecated.*")
 def test_dags_load_with_no_errors(pytestconfig: pytest.Config) -> None:
     airflow_examples_folder = (
-        pytestconfig.rootpath / "src/datahub_provider/example_dags"
+        pytestconfig.rootpath / "src/datahub_airflow_plugin/example_dags"
    )
 
     # Note: the .airflowignore file skips the snowflake DAG.
@@ -233,7 +232,11 @@ def test_lineage_backend(mock_emit, inlets, outlets, capture_executions):
     func = mock.Mock()
     func.__name__ = "foo"
 
-    dag = DAG(dag_id="test_lineage_is_sent_to_backend", start_date=DEFAULT_DATE)
+    dag = DAG(
+        dag_id="test_lineage_is_sent_to_backend",
+        start_date=DEFAULT_DATE,
+        default_view="tree",
+    )
 
     with dag:
         op1 = EmptyOperator(
@@ -252,6 +255,7 @@ def test_lineage_backend(mock_emit, inlets, outlets, capture_executions):
     # versions do not require it, but will attempt to find the associated
     # run_id in the database if execution_date is provided. As such, we
     # must fake the run_id parameter for newer Airflow versions.
+    # We need to add a type:ignore in the else branch to suppress a mypy error in Airflow < 2.2
     if AIRFLOW_VERSION < packaging.version.parse("2.2.0"):
         ti = TaskInstance(task=op2, execution_date=DEFAULT_DATE)
         # Ignoring type here because DagRun state is just a string in Airflow 1
     else:
         from airflow.utils.state import DagRunState
 
-        ti = TaskInstance(task=op2, run_id=f"test_airflow-{DEFAULT_DATE}")
+        ti = TaskInstance(task=op2, run_id=f"test_airflow-{DEFAULT_DATE}")  # type: ignore[call-arg]
         dag_run = DagRun(
             state=DagRunState.SUCCESS,
             run_id=f"scheduled_{DEFAULT_DATE.isoformat()}",
diff --git a/metadata-ingestion/developing.md b/metadata-ingestion/developing.md
index 5d49b9a866a3d..f529590e2ab39 100644
--- a/metadata-ingestion/developing.md
+++ b/metadata-ingestion/developing.md
@@ -26,6 +26,16 @@ source venv/bin/activate
 datahub version  # should print "DataHub CLI version: unavailable (installed in develop mode)"
 ```
+### (Optional) Set up your Python environment for developing the Airflow plugin
+
+From the repository root:
+
+```shell
+cd metadata-ingestion-modules/airflow-plugin
+../../gradlew :metadata-ingestion-modules:airflow-plugin:installDev
+source venv/bin/activate
+datahub version  # should print "DataHub CLI version: unavailable (installed in develop mode)"
+```
 
 ### Common setup issues
 
 Common issues (click to expand):
 
@@ -183,7 +193,7 @@ pytest -m 'slow_integration'
 ../gradlew :metadata-ingestion:testFull
 ../gradlew :metadata-ingestion:check
 # Run all tests in a single file
-../gradlew :metadata-ingestion:testSingle -PtestFile=tests/unit/test_airflow.py
+../gradlew :metadata-ingestion:testSingle -PtestFile=tests/unit/test_bigquery_source.py
 # Run all tests under tests/unit
 ../gradlew :metadata-ingestion:testSingle -PtestFile=tests/unit
 ```
diff --git a/metadata-ingestion/schedule_docs/airflow.md b/metadata-ingestion/schedule_docs/airflow.md
index e48710964b01c..95393c3cc9919 100644
--- a/metadata-ingestion/schedule_docs/airflow.md
+++ b/metadata-ingestion/schedule_docs/airflow.md
@@ -4,9 +4,9 @@ If you are using Apache Airflow for your scheduling then you might want to also
 
 We've provided a few examples of how to configure your DAG:
 
-- [`mysql_sample_dag`](../src/datahub_provider/example_dags/mysql_sample_dag.py) embeds the full MySQL ingestion configuration inside the DAG.
+- [`mysql_sample_dag`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/mysql_sample_dag.py) embeds the full MySQL ingestion configuration inside the DAG.
 
-- [`snowflake_sample_dag`](../src/datahub_provider/example_dags/snowflake_sample_dag.py) avoids embedding credentials inside the recipe, and instead fetches them from Airflow's [Connections](https://airflow.apache.org/docs/apache-airflow/stable/howto/connection/index.html) feature. You must configure your connections in Airflow to use this approach.
+- [`snowflake_sample_dag`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/snowflake_sample_dag.py) avoids embedding credentials inside the recipe, and instead fetches them from Airflow's [Connections](https://airflow.apache.org/docs/apache-airflow/stable/howto/connection/index.html) feature. You must configure your connections in Airflow to use this approach.
 
 :::tip
 
@@ -37,6 +37,6 @@ In more advanced cases, you might want to store your ingestion recipe in a file
 - Create a DAG task to read your DataHub ingestion recipe file and run it. See the example below for reference.
 - Deploy the DAG file into Airflow for scheduling. Typically this involves checking the DAG file into your dags folder, which is accessible to your Airflow instance.
 
-Example: [`generic_recipe_sample_dag`](../src/datahub_provider/example_dags/generic_recipe_sample_dag.py)
+Example: [`generic_recipe_sample_dag`](../../metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/example_dags/generic_recipe_sample_dag.py)
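In the spirit of the linked `generic_recipe_sample_dag`, a minimal recipe-from-file task might look like the following sketch; the recipe path and DAG settings are assumptions for illustration, not part of this patch:

```python
from datetime import datetime

import yaml
from airflow import DAG
from airflow.operators.python import PythonOperator

from datahub.ingestion.run.pipeline import Pipeline


def ingest_from_recipe(recipe_path: str) -> None:
    # Load the recipe file and hand it to the DataHub ingestion pipeline.
    with open(recipe_path) as f:
        config = yaml.safe_load(f)
    pipeline = Pipeline.create(config)
    pipeline.run()
    pipeline.raise_from_status()


with DAG(
    dag_id="datahub_ingest_recipe",  # illustrative name
    start_date=datetime(2023, 1, 1),
    schedule_interval="@daily",
) as dag:
    ingest = PythonOperator(
        task_id="ingest_from_recipe",
        python_callable=ingest_from_recipe,
        op_kwargs={"recipe_path": "/opt/airflow/recipes/mysql_recipe.yml"},  # hypothetical path
    )
```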
"airflow": { - "apache-airflow >= 2.0.2", - *rest_common, + f"acryl-datahub-airflow-plugin == {package_metadata['__version__']}", }, "circuit-breaker": { "gql>=3.3.0", @@ -508,8 +507,8 @@ def get_long_description(): "salesforce", "unity-catalog", "nifi", - "vertica" - # airflow is added below + "vertica", + "mode", ] if plugin for dependency in plugins[plugin] @@ -518,9 +517,6 @@ def get_long_description(): dev_requirements = { *base_dev_requirements, - # Extra requirements for Airflow. - "apache-airflow[snowflake]>=2.0.2", # snowflake is used in example dags - "virtualenv", # needed by PythonVirtualenvOperator } full_test_dev_requirements = { diff --git a/metadata-ingestion/src/datahub_provider/__init__.py b/metadata-ingestion/src/datahub_provider/__init__.py index 4c0b2bd8e714e..306076dadf82b 100644 --- a/metadata-ingestion/src/datahub_provider/__init__.py +++ b/metadata-ingestion/src/datahub_provider/__init__.py @@ -1,28 +1 @@ -import datahub - - -# This is needed to allow Airflow to pick up specific metadata fields it needs for -# certain features. We recognize it's a bit unclean to define these in multiple places, -# but at this point it's the only workaround if you'd like your custom conn type to -# show up in the Airflow UI. -def get_provider_info(): - return { - "name": "DataHub", - "description": "`DataHub `__\n", - "connection-types": [ - { - "hook-class-name": "datahub_provider.hooks.datahub.DatahubRestHook", - "connection-type": "datahub_rest", - }, - { - "hook-class-name": "datahub_provider.hooks.datahub.DatahubKafkaHook", - "connection-type": "datahub_kafka", - }, - ], - "hook-class-names": [ - "datahub_provider.hooks.datahub.DatahubRestHook", - "datahub_provider.hooks.datahub.DatahubKafkaHook", - ], - "package-name": datahub.__package_name__, - "versions": [datahub.__version__], - } +from datahub_airflow_plugin import get_provider_info diff --git a/metadata-ingestion/src/datahub_provider/_airflow_compat.py b/metadata-ingestion/src/datahub_provider/_airflow_compat.py index 67c3348ec987c..98b96e32fee78 100644 --- a/metadata-ingestion/src/datahub_provider/_airflow_compat.py +++ b/metadata-ingestion/src/datahub_provider/_airflow_compat.py @@ -1,12 +1,3 @@ -# This module must be imported before any Airflow imports in any of our files. -# The AIRFLOW_PATCHED just helps avoid flake8 errors. +from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED -from datahub.utilities._markupsafe_compat import MARKUPSAFE_PATCHED - -assert MARKUPSAFE_PATCHED - -AIRFLOW_PATCHED = True - -__all__ = [ - "AIRFLOW_PATCHED", -] +__all__ = ["AIRFLOW_PATCHED"] diff --git a/metadata-ingestion/src/datahub_provider/_airflow_shims.py b/metadata-ingestion/src/datahub_provider/_airflow_shims.py index 31e1237c0d21d..d5e4a019a4b81 100644 --- a/metadata-ingestion/src/datahub_provider/_airflow_shims.py +++ b/metadata-ingestion/src/datahub_provider/_airflow_shims.py @@ -1,29 +1,15 @@ -from datahub_provider._airflow_compat import AIRFLOW_PATCHED - -from airflow.models.baseoperator import BaseOperator - -try: - from airflow.models.mappedoperator import MappedOperator - from airflow.models.operator import Operator - from airflow.operators.empty import EmptyOperator -except ModuleNotFoundError: - # Operator isn't a real class, but rather a type alias defined - # as the union of BaseOperator and MappedOperator. - # Since older versions of Airflow don't have MappedOperator, we can just use BaseOperator. 
- Operator = BaseOperator # type: ignore - MappedOperator = None # type: ignore - from airflow.operators.dummy import DummyOperator as EmptyOperator # type: ignore - -try: - from airflow.sensors.external_task import ExternalTaskSensor -except ImportError: - from airflow.sensors.external_task_sensor import ExternalTaskSensor # type: ignore - -assert AIRFLOW_PATCHED +from datahub_airflow_plugin._airflow_shims import ( + AIRFLOW_PATCHED, + EmptyOperator, + ExternalTaskSensor, + MappedOperator, + Operator, +) __all__ = [ - "Operator", - "MappedOperator", + "AIRFLOW_PATCHED", "EmptyOperator", "ExternalTaskSensor", + "Operator", + "MappedOperator", ] diff --git a/metadata-ingestion/src/datahub_provider/_lineage_core.py b/metadata-ingestion/src/datahub_provider/_lineage_core.py index 07c70eeca4e6d..4305b39cac684 100644 --- a/metadata-ingestion/src/datahub_provider/_lineage_core.py +++ b/metadata-ingestion/src/datahub_provider/_lineage_core.py @@ -1,114 +1,3 @@ -from datetime import datetime -from typing import TYPE_CHECKING, Dict, List +from datahub_airflow_plugin._lineage_core import DatahubBasicLineageConfig -import datahub.emitter.mce_builder as builder -from datahub.api.entities.dataprocess.dataprocess_instance import InstanceRunResult -from datahub.configuration.common import ConfigModel -from datahub.utilities.urns.dataset_urn import DatasetUrn -from datahub_provider.client.airflow_generator import AirflowGenerator -from datahub_provider.entities import _Entity - -if TYPE_CHECKING: - from airflow import DAG - from airflow.models.dagrun import DagRun - from airflow.models.taskinstance import TaskInstance - - from datahub_provider._airflow_shims import Operator - from datahub_provider.hooks.datahub import DatahubGenericHook - - -def _entities_to_urn_list(iolets: List[_Entity]) -> List[DatasetUrn]: - return [DatasetUrn.create_from_string(let.urn) for let in iolets] - - -class DatahubBasicLineageConfig(ConfigModel): - enabled: bool = True - - # DataHub hook connection ID. - datahub_conn_id: str - - # Cluster to associate with the pipelines and tasks. Defaults to "prod". - cluster: str = builder.DEFAULT_FLOW_CLUSTER - - # If true, the owners field of the DAG will be capture as a DataHub corpuser. - capture_ownership_info: bool = True - - # If true, the tags field of the DAG will be captured as DataHub tags. - capture_tags_info: bool = True - - capture_executions: bool = False - - def make_emitter_hook(self) -> "DatahubGenericHook": - # This is necessary to avoid issues with circular imports. 
- from datahub_provider.hooks.datahub import DatahubGenericHook - - return DatahubGenericHook(self.datahub_conn_id) - - -def send_lineage_to_datahub( - config: DatahubBasicLineageConfig, - operator: "Operator", - inlets: List[_Entity], - outlets: List[_Entity], - context: Dict, -) -> None: - if not config.enabled: - return - - dag: "DAG" = context["dag"] - task: "Operator" = context["task"] - ti: "TaskInstance" = context["task_instance"] - - hook = config.make_emitter_hook() - emitter = hook.make_emitter() - - dataflow = AirflowGenerator.generate_dataflow( - cluster=config.cluster, - dag=dag, - capture_tags=config.capture_tags_info, - capture_owner=config.capture_ownership_info, - ) - dataflow.emit(emitter) - operator.log.info(f"Emitted from Lineage: {dataflow}") - - datajob = AirflowGenerator.generate_datajob( - cluster=config.cluster, - task=task, - dag=dag, - capture_tags=config.capture_tags_info, - capture_owner=config.capture_ownership_info, - ) - datajob.inlets.extend(_entities_to_urn_list(inlets)) - datajob.outlets.extend(_entities_to_urn_list(outlets)) - - datajob.emit(emitter) - operator.log.info(f"Emitted from Lineage: {datajob}") - - if config.capture_executions: - dag_run: "DagRun" = context["dag_run"] - - dpi = AirflowGenerator.run_datajob( - emitter=emitter, - cluster=config.cluster, - ti=ti, - dag=dag, - dag_run=dag_run, - datajob=datajob, - emit_templates=False, - ) - - operator.log.info(f"Emitted from Lineage: {dpi}") - - dpi = AirflowGenerator.complete_datajob( - emitter=emitter, - cluster=config.cluster, - ti=ti, - dag=dag, - dag_run=dag_run, - datajob=datajob, - result=InstanceRunResult.SUCCESS, - end_timestamp_millis=int(datetime.utcnow().timestamp() * 1000), - ) - operator.log.info(f"Emitted from Lineage: {dpi}") - - emitter.flush() +__all__ = ["DatahubBasicLineageConfig"] diff --git a/metadata-ingestion/src/datahub_provider/_plugin.py b/metadata-ingestion/src/datahub_provider/_plugin.py index ed2e4e1c93d80..3d74e715bd644 100644 --- a/metadata-ingestion/src/datahub_provider/_plugin.py +++ b/metadata-ingestion/src/datahub_provider/_plugin.py @@ -1,368 +1,3 @@ -from datahub_provider._airflow_compat import AIRFLOW_PATCHED +from datahub_airflow_plugin.datahub_plugin import DatahubPlugin -import contextlib -import logging -import traceback -from typing import Any, Callable, Iterable, List, Optional, Union - -from airflow.configuration import conf -from airflow.lineage import PIPELINE_OUTLETS -from airflow.models.baseoperator import BaseOperator -from airflow.plugins_manager import AirflowPlugin -from airflow.utils.module_loading import import_string -from cattr import structure - -from datahub.api.entities.dataprocess.dataprocess_instance import InstanceRunResult -from datahub_provider._airflow_shims import MappedOperator, Operator -from datahub_provider.client.airflow_generator import AirflowGenerator -from datahub_provider.hooks.datahub import DatahubGenericHook -from datahub_provider.lineage.datahub import DatahubLineageConfig - -assert AIRFLOW_PATCHED -logger = logging.getLogger(__name__) - -TASK_ON_FAILURE_CALLBACK = "on_failure_callback" -TASK_ON_SUCCESS_CALLBACK = "on_success_callback" - - -def get_lineage_config() -> DatahubLineageConfig: - """Load the lineage config from airflow.cfg.""" - - enabled = conf.get("datahub", "enabled", fallback=True) - datahub_conn_id = conf.get("datahub", "conn_id", fallback="datahub_rest_default") - cluster = conf.get("datahub", "cluster", fallback="prod") - graceful_exceptions = conf.get("datahub", "graceful_exceptions", 
fallback=True) - capture_tags_info = conf.get("datahub", "capture_tags_info", fallback=True) - capture_ownership_info = conf.get( - "datahub", "capture_ownership_info", fallback=True - ) - capture_executions = conf.get("datahub", "capture_executions", fallback=True) - return DatahubLineageConfig( - enabled=enabled, - datahub_conn_id=datahub_conn_id, - cluster=cluster, - graceful_exceptions=graceful_exceptions, - capture_ownership_info=capture_ownership_info, - capture_tags_info=capture_tags_info, - capture_executions=capture_executions, - ) - - -def _task_inlets(operator: "Operator") -> List: - # From Airflow 2.4 _inlets is dropped and inlets used consistently. Earlier it was not the case, so we have to stick there to _inlets - if hasattr(operator, "_inlets"): - return operator._inlets # type: ignore[attr-defined, union-attr] - return operator.inlets - - -def _task_outlets(operator: "Operator") -> List: - # From Airflow 2.4 _outlets is dropped and inlets used consistently. Earlier it was not the case, so we have to stick there to _outlets - # We have to use _outlets because outlets is empty in Airflow < 2.4.0 - if hasattr(operator, "_outlets"): - return operator._outlets # type: ignore[attr-defined, union-attr] - return operator.outlets - - -def get_inlets_from_task(task: BaseOperator, context: Any) -> Iterable[Any]: - # TODO: Fix for https://github.com/apache/airflow/commit/1b1f3fabc5909a447a6277cafef3a0d4ef1f01ae - # in Airflow 2.4. - # TODO: ignore/handle airflow's dataset type in our lineage - - inlets: List[Any] = [] - task_inlets = _task_inlets(task) - # From Airflow 2.3 this should be AbstractOperator but due to compatibility reason lets use BaseOperator - if isinstance(task_inlets, (str, BaseOperator)): - inlets = [ - task_inlets, - ] - - if task_inlets and isinstance(task_inlets, list): - inlets = [] - task_ids = ( - {o for o in task_inlets if isinstance(o, str)} - .union(op.task_id for op in task_inlets if isinstance(op, BaseOperator)) - .intersection(task.get_flat_relative_ids(upstream=True)) - ) - - from airflow.lineage import AUTO - - # pick up unique direct upstream task_ids if AUTO is specified - if AUTO.upper() in task_inlets or AUTO.lower() in task_inlets: - print("Picking up unique direct upstream task_ids as AUTO is specified") - task_ids = task_ids.union( - task_ids.symmetric_difference(task.upstream_task_ids) - ) - - inlets = task.xcom_pull( - context, task_ids=list(task_ids), dag_id=task.dag_id, key=PIPELINE_OUTLETS - ) - - # re-instantiate the obtained inlets - inlets = [ - structure(item["data"], import_string(item["type_name"])) - # _get_instance(structure(item, Metadata)) - for sublist in inlets - if sublist - for item in sublist - ] - - for inlet in task_inlets: - if not isinstance(inlet, str): - inlets.append(inlet) - - return inlets - - -def _make_emit_callback( - logger: logging.Logger, -) -> Callable[[Optional[Exception], str], None]: - def emit_callback(err: Optional[Exception], msg: str) -> None: - if err: - logger.error(f"Error sending metadata to datahub: {msg}", exc_info=err) - - return emit_callback - - -def datahub_task_status_callback(context, status): - ti = context["ti"] - task: "BaseOperator" = ti.task - dag = context["dag"] - - # This code is from the original airflow lineage code -> - # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_inlets_from_task(task, context) - - emitter = ( - DatahubGenericHook(context["_datahub_config"].datahub_conn_id) - .get_underlying_hook() - .make_emitter() - ) - - dataflow = 
AirflowGenerator.generate_dataflow( - cluster=context["_datahub_config"].cluster, - dag=dag, - capture_tags=context["_datahub_config"].capture_tags_info, - capture_owner=context["_datahub_config"].capture_ownership_info, - ) - task.log.info(f"Emitting Datahub Dataflow: {dataflow}") - dataflow.emit(emitter, callback=_make_emit_callback(task.log)) - - datajob = AirflowGenerator.generate_datajob( - cluster=context["_datahub_config"].cluster, - task=task, - dag=dag, - capture_tags=context["_datahub_config"].capture_tags_info, - capture_owner=context["_datahub_config"].capture_ownership_info, - ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = _task_outlets(task) - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) - - task.log.info(f"Emitting Datahub Datajob: {datajob}") - datajob.emit(emitter, callback=_make_emit_callback(task.log)) - - if context["_datahub_config"].capture_executions: - dpi = AirflowGenerator.run_datajob( - emitter=emitter, - cluster=context["_datahub_config"].cluster, - ti=context["ti"], - dag=dag, - dag_run=context["dag_run"], - datajob=datajob, - start_timestamp_millis=int(ti.start_date.timestamp() * 1000), - ) - - task.log.info(f"Emitted Start Datahub Dataprocess Instance: {dpi}") - - dpi = AirflowGenerator.complete_datajob( - emitter=emitter, - cluster=context["_datahub_config"].cluster, - ti=context["ti"], - dag_run=context["dag_run"], - result=status, - dag=dag, - datajob=datajob, - end_timestamp_millis=int(ti.end_date.timestamp() * 1000), - ) - task.log.info(f"Emitted Completed Data Process Instance: {dpi}") - - emitter.flush() - - -def datahub_pre_execution(context): - ti = context["ti"] - task: "BaseOperator" = ti.task - dag = context["dag"] - - task.log.info("Running Datahub pre_execute method") - - emitter = ( - DatahubGenericHook(context["_datahub_config"].datahub_conn_id) - .get_underlying_hook() - .make_emitter() - ) - - # This code is from the original airflow lineage code -> - # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_inlets_from_task(task, context) - - datajob = AirflowGenerator.generate_datajob( - cluster=context["_datahub_config"].cluster, - task=context["ti"].task, - dag=dag, - capture_tags=context["_datahub_config"].capture_tags_info, - capture_owner=context["_datahub_config"].capture_ownership_info, - ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = _task_outlets(task) - - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) - - task.log.info(f"Emitting Datahub dataJob {datajob}") - datajob.emit(emitter, callback=_make_emit_callback(task.log)) - - if context["_datahub_config"].capture_executions: - dpi = AirflowGenerator.run_datajob( - emitter=emitter, - cluster=context["_datahub_config"].cluster, - ti=context["ti"], - dag=dag, - dag_run=context["dag_run"], - datajob=datajob, - start_timestamp_millis=int(ti.start_date.timestamp() * 1000), - ) - - task.log.info(f"Emitting Datahub Dataprocess Instance: {dpi}") - - emitter.flush() - - -def _wrap_pre_execution(pre_execution): - def custom_pre_execution(context): - config = get_lineage_config() - if config.enabled: - context["_datahub_config"] = config - datahub_pre_execution(context) - - # Call original policy - if pre_execution: - pre_execution(context) - - return custom_pre_execution - - -def _wrap_on_failure_callback(on_failure_callback): - def custom_on_failure_callback(context): - config = get_lineage_config() - if config.enabled: - context["_datahub_config"] = config 
- try: - datahub_task_status_callback(context, status=InstanceRunResult.FAILURE) - except Exception as e: - if not config.graceful_exceptions: - raise e - else: - print(f"Exception: {traceback.format_exc()}") - - # Call original policy - if on_failure_callback: - on_failure_callback(context) - - return custom_on_failure_callback - - -def _wrap_on_success_callback(on_success_callback): - def custom_on_success_callback(context): - config = get_lineage_config() - if config.enabled: - context["_datahub_config"] = config - try: - datahub_task_status_callback(context, status=InstanceRunResult.SUCCESS) - except Exception as e: - if not config.graceful_exceptions: - raise e - else: - print(f"Exception: {traceback.format_exc()}") - - # Call original policy - if on_success_callback: - on_success_callback(context) - - return custom_on_success_callback - - -def task_policy(task: Union[BaseOperator, MappedOperator]) -> None: - task.log.debug(f"Setting task policy for Dag: {task.dag_id} Task: {task.task_id}") - # task.add_inlets(["auto"]) - # task.pre_execute = _wrap_pre_execution(task.pre_execute) - - # MappedOperator's callbacks don't have setters until Airflow 2.X.X - # https://github.com/apache/airflow/issues/24547 - # We can bypass this by going through partial_kwargs for now - if MappedOperator and isinstance(task, MappedOperator): # type: ignore - on_failure_callback_prop: property = getattr( - MappedOperator, TASK_ON_FAILURE_CALLBACK - ) - on_success_callback_prop: property = getattr( - MappedOperator, TASK_ON_SUCCESS_CALLBACK - ) - if not on_failure_callback_prop.fset or not on_success_callback_prop.fset: - task.log.debug( - "Using MappedOperator's partial_kwargs instead of callback properties" - ) - task.partial_kwargs[TASK_ON_FAILURE_CALLBACK] = _wrap_on_failure_callback( - task.on_failure_callback - ) - task.partial_kwargs[TASK_ON_SUCCESS_CALLBACK] = _wrap_on_success_callback( - task.on_success_callback - ) - return - - task.on_failure_callback = _wrap_on_failure_callback(task.on_failure_callback) # type: ignore - task.on_success_callback = _wrap_on_success_callback(task.on_success_callback) # type: ignore - # task.pre_execute = _wrap_pre_execution(task.pre_execute) - - -def _wrap_task_policy(policy): - if policy and hasattr(policy, "_task_policy_patched_by"): - return policy - - def custom_task_policy(task): - policy(task) - task_policy(task) - - # Add a flag to the policy to indicate that we've patched it. 
- custom_task_policy._task_policy_patched_by = "datahub_plugin" # type: ignore[attr-defined] - return custom_task_policy - - -def _patch_policy(settings): - if hasattr(settings, "task_policy"): - datahub_task_policy = _wrap_task_policy(settings.task_policy) - settings.task_policy = datahub_task_policy - - -def _patch_datahub_policy(): - with contextlib.suppress(ImportError): - import airflow_local_settings - - _patch_policy(airflow_local_settings) - - from airflow.models.dagbag import settings - - _patch_policy(settings) - - -_patch_datahub_policy() - - -class DatahubPlugin(AirflowPlugin): - name = "datahub_plugin" +__all__ = ["DatahubPlugin"] diff --git a/metadata-ingestion/src/datahub_provider/client/airflow_generator.py b/metadata-ingestion/src/datahub_provider/client/airflow_generator.py index d2d29b00d244f..d50ae152f2b1e 100644 --- a/metadata-ingestion/src/datahub_provider/client/airflow_generator.py +++ b/metadata-ingestion/src/datahub_provider/client/airflow_generator.py @@ -1,509 +1,3 @@ -from datahub_provider._airflow_compat import AIRFLOW_PATCHED +from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast - -from airflow.configuration import conf - -from datahub.api.entities.datajob import DataFlow, DataJob -from datahub.api.entities.dataprocess.dataprocess_instance import ( - DataProcessInstance, - InstanceRunResult, -) -from datahub.metadata.schema_classes import DataProcessTypeClass -from datahub.utilities.urns.data_flow_urn import DataFlowUrn -from datahub.utilities.urns.data_job_urn import DataJobUrn - -assert AIRFLOW_PATCHED - -if TYPE_CHECKING: - from airflow import DAG - from airflow.models import DagRun, TaskInstance - - from datahub.emitter.kafka_emitter import DatahubKafkaEmitter - from datahub.emitter.rest_emitter import DatahubRestEmitter - from datahub_provider._airflow_shims import Operator - - -def _task_downstream_task_ids(operator: "Operator") -> Set[str]: - if hasattr(operator, "downstream_task_ids"): - return operator.downstream_task_ids - return operator._downstream_task_id # type: ignore[attr-defined,union-attr] - - -class AirflowGenerator: - @staticmethod - def _get_dependencies( - task: "Operator", dag: "DAG", flow_urn: DataFlowUrn - ) -> List[DataJobUrn]: - from datahub_provider._airflow_shims import ExternalTaskSensor - - # resolve URNs for upstream nodes in subdags upstream of the current task. - upstream_subdag_task_urns: List[DataJobUrn] = [] - - for upstream_task_id in task.upstream_task_ids: - upstream_task = dag.task_dict[upstream_task_id] - - # if upstream task is not a subdag, then skip it - upstream_subdag = getattr(upstream_task, "subdag", None) - if upstream_subdag is None: - continue - - # else, link the leaf tasks of the upstream subdag as upstream tasks - for upstream_subdag_task_id in upstream_subdag.task_dict: - upstream_subdag_task = upstream_subdag.task_dict[ - upstream_subdag_task_id - ] - - upstream_subdag_task_urn = DataJobUrn.create_from_ids( - job_id=upstream_subdag_task_id, data_flow_urn=str(flow_urn) - ) - - # if subdag task is a leaf task, then link it as an upstream task - if len(_task_downstream_task_ids(upstream_subdag_task)) == 0: - upstream_subdag_task_urns.append(upstream_subdag_task_urn) - - # resolve URNs for upstream nodes that trigger the subdag containing the current task. 
- # (if it is in a subdag at all) - upstream_subdag_triggers: List[DataJobUrn] = [] - - # subdags are always named with 'parent.child' style or Airflow won't run them - # add connection from subdag trigger(s) if subdag task has no upstreams - if ( - dag.is_subdag - and dag.parent_dag is not None - and len(task.upstream_task_ids) == 0 - ): - # filter through the parent dag's tasks and find the subdag trigger(s) - subdags = [ - x for x in dag.parent_dag.task_dict.values() if x.subdag is not None - ] - matched_subdags = [ - x for x in subdags if x.subdag and x.subdag.dag_id == dag.dag_id - ] - - # id of the task containing the subdag - subdag_task_id = matched_subdags[0].task_id - - # iterate through the parent dag's tasks and find the ones that trigger the subdag - for upstream_task_id in dag.parent_dag.task_dict: - upstream_task = dag.parent_dag.task_dict[upstream_task_id] - upstream_task_urn = DataJobUrn.create_from_ids( - data_flow_urn=str(flow_urn), job_id=upstream_task_id - ) - - # if the task triggers the subdag, link it to this node in the subdag - if subdag_task_id in _task_downstream_task_ids(upstream_task): - upstream_subdag_triggers.append(upstream_task_urn) - - # If the operator is an ExternalTaskSensor then we set the remote task as upstream. - # It is possible to tie an external sensor to DAG if external_task_id is omitted but currently we can't tie - # jobflow to anothet jobflow. - external_task_upstreams = [] - if task.task_type == "ExternalTaskSensor": - task = cast(ExternalTaskSensor, task) - if hasattr(task, "external_task_id") and task.external_task_id is not None: - external_task_upstreams = [ - DataJobUrn.create_from_ids( - job_id=task.external_task_id, - data_flow_urn=str( - DataFlowUrn.create_from_ids( - orchestrator=flow_urn.get_orchestrator_name(), - flow_id=task.external_dag_id, - env=flow_urn.get_env(), - ) - ), - ) - ] - # exclude subdag operator tasks since these are not emitted, resulting in empty metadata - upstream_tasks = ( - [ - DataJobUrn.create_from_ids(job_id=task_id, data_flow_urn=str(flow_urn)) - for task_id in task.upstream_task_ids - if getattr(dag.task_dict[task_id], "subdag", None) is None - ] - + upstream_subdag_task_urns - + upstream_subdag_triggers - + external_task_upstreams - ) - return upstream_tasks - - @staticmethod - def generate_dataflow( - cluster: str, - dag: "DAG", - capture_owner: bool = True, - capture_tags: bool = True, - ) -> DataFlow: - """ - Generates a Dataflow object from an Airflow DAG - :param cluster: str - name of the cluster - :param dag: DAG - - :param capture_tags: - :param capture_owner: - :return: DataFlow - Data generated dataflow - """ - id = dag.dag_id - orchestrator = "airflow" - description = f"{dag.description}\n\n{dag.doc_md or ''}" - data_flow = DataFlow( - env=cluster, id=id, orchestrator=orchestrator, description=description - ) - - flow_property_bag: Dict[str, str] = {} - - allowed_flow_keys = [ - "_access_control", - "_concurrency", - "_default_view", - "catchup", - "fileloc", - "is_paused_upon_creation", - "start_date", - "tags", - "timezone", - ] - - for key in allowed_flow_keys: - if hasattr(dag, key): - flow_property_bag[key] = repr(getattr(dag, key)) - - data_flow.properties = flow_property_bag - base_url = conf.get("webserver", "base_url") - data_flow.url = f"{base_url}/tree?dag_id={dag.dag_id}" - - if capture_owner and dag.owner: - data_flow.owners.add(dag.owner) - - if capture_tags and dag.tags: - data_flow.tags.update(dag.tags) - - return data_flow - - @staticmethod - def _get_description(task: 
"Operator") -> Optional[str]: - from airflow.models.baseoperator import BaseOperator - - if not isinstance(task, BaseOperator): - # TODO: Get docs for mapped operators. - return None - - if hasattr(task, "doc") and task.doc: - return task.doc - elif hasattr(task, "doc_md") and task.doc_md: - return task.doc_md - elif hasattr(task, "doc_json") and task.doc_json: - return task.doc_json - elif hasattr(task, "doc_yaml") and task.doc_yaml: - return task.doc_yaml - elif hasattr(task, "doc_rst") and task.doc_yaml: - return task.doc_yaml - return None - - @staticmethod - def generate_datajob( - cluster: str, - task: "Operator", - dag: "DAG", - set_dependencies: bool = True, - capture_owner: bool = True, - capture_tags: bool = True, - ) -> DataJob: - """ - - :param cluster: str - :param task: TaskIntance - :param dag: DAG - :param set_dependencies: bool - whether to extract dependencies from airflow task - :param capture_owner: bool - whether to extract owner from airflow task - :param capture_tags: bool - whether to set tags automatically from airflow task - :return: DataJob - returns the generated DataJob object - """ - dataflow_urn = DataFlowUrn.create_from_ids( - orchestrator="airflow", env=cluster, flow_id=dag.dag_id - ) - datajob = DataJob(id=task.task_id, flow_urn=dataflow_urn) - - # TODO add support for MappedOperator - datajob.description = AirflowGenerator._get_description(task) - - job_property_bag: Dict[str, str] = {} - - allowed_task_keys = [ - "_downstream_task_ids", - "_inlets", - "_outlets", - "_task_type", - "_task_module", - "depends_on_past", - "email", - "label", - "execution_timeout", - "sla", - "sql", - "task_id", - "trigger_rule", - "wait_for_downstream", - # In Airflow 2.3, _downstream_task_ids was renamed to downstream_task_ids - "downstream_task_ids", - # In Airflow 2.4, _inlets and _outlets were removed in favor of non-private versions. 
- "inlets", - "outlets", - ] - - for key in allowed_task_keys: - if hasattr(task, key): - job_property_bag[key] = repr(getattr(task, key)) - - datajob.properties = job_property_bag - base_url = conf.get("webserver", "base_url") - datajob.url = f"{base_url}/taskinstance/list/?flt1_dag_id_equals={datajob.flow_urn.get_flow_id()}&_flt_3_task_id={task.task_id}" - - if capture_owner and dag.owner: - datajob.owners.add(dag.owner) - - if capture_tags and dag.tags: - datajob.tags.update(dag.tags) - - if set_dependencies: - datajob.upstream_urns.extend( - AirflowGenerator._get_dependencies( - task=task, dag=dag, flow_urn=datajob.flow_urn - ) - ) - - return datajob - - @staticmethod - def create_datajob_instance( - cluster: str, - task: "Operator", - dag: "DAG", - data_job: Optional[DataJob] = None, - ) -> DataProcessInstance: - if data_job is None: - data_job = AirflowGenerator.generate_datajob(cluster, task=task, dag=dag) - dpi = DataProcessInstance.from_datajob( - datajob=data_job, id=task.task_id, clone_inlets=True, clone_outlets=True - ) - return dpi - - @staticmethod - def run_dataflow( - emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"], - cluster: str, - dag_run: "DagRun", - start_timestamp_millis: Optional[int] = None, - dataflow: Optional[DataFlow] = None, - ) -> None: - if dataflow is None: - assert dag_run.dag - dataflow = AirflowGenerator.generate_dataflow(cluster, dag_run.dag) - - if start_timestamp_millis is None: - assert dag_run.execution_date - start_timestamp_millis = int(dag_run.execution_date.timestamp() * 1000) - - assert dag_run.run_id - dpi = DataProcessInstance.from_dataflow(dataflow=dataflow, id=dag_run.run_id) - - # This property only exists in Airflow2 - if hasattr(dag_run, "run_type"): - from airflow.utils.types import DagRunType - - if dag_run.run_type == DagRunType.SCHEDULED: - dpi.type = DataProcessTypeClass.BATCH_SCHEDULED - elif dag_run.run_type == DagRunType.MANUAL: - dpi.type = DataProcessTypeClass.BATCH_AD_HOC - else: - if dag_run.run_id.startswith("scheduled__"): - dpi.type = DataProcessTypeClass.BATCH_SCHEDULED - else: - dpi.type = DataProcessTypeClass.BATCH_AD_HOC - - property_bag: Dict[str, str] = {} - property_bag["run_id"] = str(dag_run.run_id) - property_bag["execution_date"] = str(dag_run.execution_date) - property_bag["end_date"] = str(dag_run.end_date) - property_bag["start_date"] = str(dag_run.start_date) - property_bag["creating_job_id"] = str(dag_run.creating_job_id) - property_bag["data_interval_start"] = str(dag_run.data_interval_start) - property_bag["data_interval_end"] = str(dag_run.data_interval_end) - property_bag["external_trigger"] = str(dag_run.external_trigger) - dpi.properties.update(property_bag) - - dpi.emit_process_start( - emitter=emitter, start_timestamp_millis=start_timestamp_millis - ) - - @staticmethod - def complete_dataflow( - emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"], - cluster: str, - dag_run: "DagRun", - end_timestamp_millis: Optional[int] = None, - dataflow: Optional[DataFlow] = None, - ) -> None: - """ - - :param emitter: DatahubRestEmitter - the datahub rest emitter to emit the generated mcps - :param cluster: str - name of the cluster - :param dag_run: DagRun - :param end_timestamp_millis: Optional[int] - the completion time in milliseconds if not set the current time will be used. 
- :param dataflow: Optional[Dataflow] - """ - if dataflow is None: - assert dag_run.dag - dataflow = AirflowGenerator.generate_dataflow(cluster, dag_run.dag) - - assert dag_run.run_id - dpi = DataProcessInstance.from_dataflow(dataflow=dataflow, id=dag_run.run_id) - if end_timestamp_millis is None: - if dag_run.end_date is None: - raise Exception( - f"Dag {dag_run.dag_id}_{dag_run.run_id} is still running and unable to get end_date..." - ) - end_timestamp_millis = int(dag_run.end_date.timestamp() * 1000) - - # We should use DagRunState but it is not available in Airflow 1 - if dag_run.state == "success": - result = InstanceRunResult.SUCCESS - elif dag_run.state == "failed": - result = InstanceRunResult.FAILURE - else: - raise Exception( - f"Result should be either success or failure and it was {dag_run.state}" - ) - - dpi.emit_process_end( - emitter=emitter, - end_timestamp_millis=end_timestamp_millis, - result=result, - result_type="airflow", - ) - - @staticmethod - def run_datajob( - emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"], - cluster: str, - ti: "TaskInstance", - dag: "DAG", - dag_run: "DagRun", - start_timestamp_millis: Optional[int] = None, - datajob: Optional[DataJob] = None, - attempt: Optional[int] = None, - emit_templates: bool = True, - ) -> DataProcessInstance: - if datajob is None: - datajob = AirflowGenerator.generate_datajob(cluster, ti.task, dag) - - assert dag_run.run_id - dpi = DataProcessInstance.from_datajob( - datajob=datajob, - id=f"{dag.dag_id}_{ti.task_id}_{dag_run.run_id}", - clone_inlets=True, - clone_outlets=True, - ) - job_property_bag: Dict[str, str] = {} - job_property_bag["run_id"] = str(dag_run.run_id) - job_property_bag["duration"] = str(ti.duration) - job_property_bag["start_date"] = str(ti.start_date) - job_property_bag["end_date"] = str(ti.end_date) - job_property_bag["execution_date"] = str(ti.execution_date) - job_property_bag["try_number"] = str(ti.try_number - 1) - job_property_bag["hostname"] = str(ti.hostname) - job_property_bag["max_tries"] = str(ti.max_tries) - # Not compatible with Airflow 1 - if hasattr(ti, "external_executor_id"): - job_property_bag["external_executor_id"] = str(ti.external_executor_id) - job_property_bag["pid"] = str(ti.pid) - job_property_bag["state"] = str(ti.state) - job_property_bag["operator"] = str(ti.operator) - job_property_bag["priority_weight"] = str(ti.priority_weight) - job_property_bag["unixname"] = str(ti.unixname) - job_property_bag["log_url"] = ti.log_url - dpi.properties.update(job_property_bag) - dpi.url = ti.log_url - - # This property only exists in Airflow2 - if hasattr(ti, "dag_run") and hasattr(ti.dag_run, "run_type"): - from airflow.utils.types import DagRunType - - if ti.dag_run.run_type == DagRunType.SCHEDULED: - dpi.type = DataProcessTypeClass.BATCH_SCHEDULED - elif ti.dag_run.run_type == DagRunType.MANUAL: - dpi.type = DataProcessTypeClass.BATCH_AD_HOC - else: - if dag_run.run_id.startswith("scheduled__"): - dpi.type = DataProcessTypeClass.BATCH_SCHEDULED - else: - dpi.type = DataProcessTypeClass.BATCH_AD_HOC - - if start_timestamp_millis is None: - assert ti.start_date - start_timestamp_millis = int(ti.start_date.timestamp() * 1000) - - if attempt is None: - attempt = ti.try_number - - dpi.emit_process_start( - emitter=emitter, - start_timestamp_millis=start_timestamp_millis, - attempt=attempt, - emit_template=emit_templates, - ) - return dpi - - @staticmethod - def complete_datajob( - emitter: Union["DatahubRestEmitter", "DatahubKafkaEmitter"], - cluster: str, - ti: 
"TaskInstance", - dag: "DAG", - dag_run: "DagRun", - end_timestamp_millis: Optional[int] = None, - result: Optional[InstanceRunResult] = None, - datajob: Optional[DataJob] = None, - ) -> DataProcessInstance: - """ - - :param emitter: DatahubRestEmitter - :param cluster: str - :param ti: TaskInstance - :param dag: DAG - :param dag_run: DagRun - :param end_timestamp_millis: Optional[int] - :param result: Optional[str] One of the result from datahub.metadata.schema_class.RunResultTypeClass - :param datajob: Optional[DataJob] - :return: DataProcessInstance - """ - if datajob is None: - datajob = AirflowGenerator.generate_datajob(cluster, ti.task, dag) - - if end_timestamp_millis is None: - assert ti.end_date - end_timestamp_millis = int(ti.end_date.timestamp() * 1000) - - if result is None: - # We should use TaskInstanceState but it is not available in Airflow 1 - if ti.state == "success": - result = InstanceRunResult.SUCCESS - elif ti.state == "failed": - result = InstanceRunResult.FAILURE - else: - raise Exception( - f"Result should be either success or failure and it was {ti.state}" - ) - - dpi = DataProcessInstance.from_datajob( - datajob=datajob, - id=f"{dag.dag_id}_{ti.task_id}_{dag_run.run_id}", - clone_inlets=True, - clone_outlets=True, - ) - dpi.emit_process_end( - emitter=emitter, - end_timestamp_millis=end_timestamp_millis, - result=result, - result_type="airflow", - ) - return dpi +__all__ = ["AirflowGenerator"] diff --git a/metadata-ingestion/src/datahub_provider/entities.py b/metadata-ingestion/src/datahub_provider/entities.py index bfccc2f22eeb8..13be4ecdad655 100644 --- a/metadata-ingestion/src/datahub_provider/entities.py +++ b/metadata-ingestion/src/datahub_provider/entities.py @@ -1,48 +1,3 @@ -from abc import abstractmethod -from typing import Optional +from datahub_airflow_plugin.entities import Dataset, Urn, _Entity -import attr - -import datahub.emitter.mce_builder as builder -from datahub.utilities.urns.urn import guess_entity_type - - -class _Entity: - @property - @abstractmethod - def urn(self) -> str: - pass - - -@attr.s(auto_attribs=True, str=True) -class Dataset(_Entity): - platform: str - name: str - env: str = builder.DEFAULT_ENV - platform_instance: Optional[str] = None - - @property - def urn(self): - return builder.make_dataset_urn_with_platform_instance( - platform=self.platform, - name=self.name, - platform_instance=self.platform_instance, - env=self.env, - ) - - -@attr.s(str=True) -class Urn(_Entity): - _urn: str = attr.ib() - - @_urn.validator - def _validate_urn(self, attribute, value): - if not value.startswith("urn:"): - raise ValueError("invalid urn provided: urns must start with 'urn:'") - if guess_entity_type(value) != "dataset": - # This is because DataJobs only support Dataset lineage. 
- raise ValueError("Airflow lineage currently only supports datasets") - - @property - def urn(self): - return self._urn +__all__ = ["_Entity", "Dataset", "Urn"] diff --git a/metadata-ingestion/src/datahub_provider/hooks/datahub.py b/metadata-ingestion/src/datahub_provider/hooks/datahub.py index e2e523fc5d6af..949d98ce631ed 100644 --- a/metadata-ingestion/src/datahub_provider/hooks/datahub.py +++ b/metadata-ingestion/src/datahub_provider/hooks/datahub.py @@ -1,216 +1,8 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union - -from airflow.exceptions import AirflowException -from airflow.hooks.base import BaseHook - -from datahub.metadata.com.linkedin.pegasus2avro.mxe import ( - MetadataChangeEvent, - MetadataChangeProposal, +from datahub_airflow_plugin.hooks.datahub import ( + BaseHook, + DatahubGenericHook, + DatahubKafkaHook, + DatahubRestHook, ) -if TYPE_CHECKING: - from airflow.models.connection import Connection - - from datahub.emitter.kafka_emitter import DatahubKafkaEmitter - from datahub.emitter.rest_emitter import DatahubRestEmitter - from datahub.ingestion.sink.datahub_kafka import KafkaSinkConfig - - -class DatahubRestHook(BaseHook): - """ - Creates a DataHub Rest API connection used to send metadata to DataHub. - Takes the endpoint for your DataHub Rest API in the Server Endpoint(host) field. - - URI example: :: - - AIRFLOW_CONN_DATAHUB_REST_DEFAULT='datahub-rest://rest-endpoint' - - :param datahub_rest_conn_id: Reference to the DataHub Rest connection. - :type datahub_rest_conn_id: str - """ - - conn_name_attr = "datahub_rest_conn_id" - default_conn_name = "datahub_rest_default" - conn_type = "datahub_rest" - hook_name = "DataHub REST Server" - - def __init__(self, datahub_rest_conn_id: str = default_conn_name) -> None: - super().__init__() - self.datahub_rest_conn_id = datahub_rest_conn_id - - @staticmethod - def get_connection_form_widgets() -> Dict[str, Any]: - return {} - - @staticmethod - def get_ui_field_behaviour() -> Dict: - """Returns custom field behavior""" - return { - "hidden_fields": ["port", "schema", "login"], - "relabeling": { - "host": "Server Endpoint", - }, - } - - def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: - conn: "Connection" = self.get_connection(self.datahub_rest_conn_id) - - host = conn.host - if not host: - raise AirflowException("host parameter is required") - if conn.port: - if ":" in host: - raise AirflowException( - "host parameter should not contain a port number if the port is specified separately" - ) - host = f"{host}:{conn.port}" - password = conn.password - timeout_sec = conn.extra_dejson.get("timeout_sec") - return (host, password, timeout_sec) - - def make_emitter(self) -> "DatahubRestEmitter": - import datahub.emitter.rest_emitter - - return datahub.emitter.rest_emitter.DatahubRestEmitter(*self._get_config()) - - def emit_mces(self, mces: List[MetadataChangeEvent]) -> None: - emitter = self.make_emitter() - - for mce in mces: - emitter.emit_mce(mce) - - def emit_mcps(self, mcps: List[MetadataChangeProposal]) -> None: - emitter = self.make_emitter() - - for mce in mcps: - emitter.emit_mcp(mce) - - -class DatahubKafkaHook(BaseHook): - """ - Creates a DataHub Kafka connection used to send metadata to DataHub. - Takes your kafka broker in the Kafka Broker(host) field. - - URI example: :: - - AIRFLOW_CONN_DATAHUB_KAFKA_DEFAULT='datahub-kafka://kafka-broker' - - :param datahub_kafka_conn_id: Reference to the DataHub Kafka connection. 
-    :type datahub_kafka_conn_id: str
-    """
-
-    conn_name_attr = "datahub_kafka_conn_id"
-    default_conn_name = "datahub_kafka_default"
-    conn_type = "datahub_kafka"
-    hook_name = "DataHub Kafka Sink"
-
-    def __init__(self, datahub_kafka_conn_id: str = default_conn_name) -> None:
-        super().__init__()
-        self.datahub_kafka_conn_id = datahub_kafka_conn_id
-
-    @staticmethod
-    def get_connection_form_widgets() -> Dict[str, Any]:
-        return {}
-
-    @staticmethod
-    def get_ui_field_behaviour() -> Dict:
-        """Returns custom field behavior"""
-        return {
-            "hidden_fields": ["port", "schema", "login", "password"],
-            "relabeling": {
-                "host": "Kafka Broker",
-            },
-        }
-
-    def _get_config(self) -> "KafkaSinkConfig":
-        import datahub.ingestion.sink.datahub_kafka
-
-        conn = self.get_connection(self.datahub_kafka_conn_id)
-        obj = conn.extra_dejson
-        obj.setdefault("connection", {})
-        if conn.host is not None:
-            if "bootstrap" in obj["connection"]:
-                raise AirflowException(
-                    "Kafka broker specified twice (present in host and extra)"
-                )
-            obj["connection"]["bootstrap"] = ":".join(
-                map(str, filter(None, [conn.host, conn.port]))
-            )
-        config = datahub.ingestion.sink.datahub_kafka.KafkaSinkConfig.parse_obj(obj)
-        return config
-
-    def make_emitter(self) -> "DatahubKafkaEmitter":
-        import datahub.emitter.kafka_emitter
-
-        sink_config = self._get_config()
-        return datahub.emitter.kafka_emitter.DatahubKafkaEmitter(sink_config)
-
-    def emit_mces(self, mces: List[MetadataChangeEvent]) -> None:
-        emitter = self.make_emitter()
-        errors = []
-
-        def callback(exc, msg):
-            if exc:
-                errors.append(exc)
-
-        for mce in mces:
-            emitter.emit_mce_async(mce, callback)
-
-        emitter.flush()
-
-        if errors:
-            raise AirflowException(f"failed to push some MCEs: {errors}")
-
-    def emit_mcps(self, mcps: List[MetadataChangeProposal]) -> None:
-        emitter = self.make_emitter()
-        errors = []
-
-        def callback(exc, msg):
-            if exc:
-                errors.append(exc)
-
-        for mcp in mcps:
-            emitter.emit_mcp_async(mcp, callback)
-
-        emitter.flush()
-
-        if errors:
-            raise AirflowException(f"failed to push some MCPs: {errors}")
-
-
-class DatahubGenericHook(BaseHook):
-    """
-    Emits Metadata Change Events using either the DatahubRestHook or the
-    DatahubKafkaHook. Set up a DataHub Rest or Kafka connection to use.
-
-    :param datahub_conn_id: Reference to the DataHub connection.
-    :type datahub_conn_id: str
-    """
-
-    def __init__(self, datahub_conn_id: str) -> None:
-        super().__init__()
-        self.datahub_conn_id = datahub_conn_id
-
-    def get_underlying_hook(self) -> Union[DatahubRestHook, DatahubKafkaHook]:
-        conn = self.get_connection(self.datahub_conn_id)
-
-        # We need to figure out the underlying hook type. First check the
-        # conn_type. If that fails, attempt to guess using the conn id name.
-        if conn.conn_type == DatahubRestHook.conn_type:
-            return DatahubRestHook(self.datahub_conn_id)
-        elif conn.conn_type == DatahubKafkaHook.conn_type:
-            return DatahubKafkaHook(self.datahub_conn_id)
-        elif "rest" in self.datahub_conn_id:
-            return DatahubRestHook(self.datahub_conn_id)
-        elif "kafka" in self.datahub_conn_id:
-            return DatahubKafkaHook(self.datahub_conn_id)
-        else:
-            raise AirflowException(
-                f"DataHub cannot handle conn_type {conn.conn_type} in {conn}"
-            )
-
-    def make_emitter(self) -> Union["DatahubRestEmitter", "DatahubKafkaEmitter"]:
-        return self.get_underlying_hook().make_emitter()
-
-    def emit_mces(self, mces: List[MetadataChangeEvent]) -> None:
-        return self.get_underlying_hook().emit_mces(mces)
+__all__ = ["DatahubRestHook", "DatahubKafkaHook", "DatahubGenericHook", "BaseHook"]
diff --git a/metadata-ingestion/src/datahub_provider/lineage/datahub.py b/metadata-ingestion/src/datahub_provider/lineage/datahub.py
index 009ce4bb29a97..ffe1adb8255b2 100644
--- a/metadata-ingestion/src/datahub_provider/lineage/datahub.py
+++ b/metadata-ingestion/src/datahub_provider/lineage/datahub.py
@@ -1,91 +1,6 @@
-import json
-from typing import TYPE_CHECKING, Dict, List, Optional
-
-from airflow.configuration import conf
-from airflow.lineage.backend import LineageBackend
-
-from datahub_provider._lineage_core import (
-    DatahubBasicLineageConfig,
-    send_lineage_to_datahub,
+from datahub_airflow_plugin.lineage.datahub import (
+    DatahubLineageBackend,
+    DatahubLineageConfig,
 )
 
-if TYPE_CHECKING:
-    from airflow.models.baseoperator import BaseOperator
-
-
-class DatahubLineageConfig(DatahubBasicLineageConfig):
-    # If set to true, most runtime errors in the lineage backend will be
-    # suppressed and will not cause the overall task to fail. Note that
-    # configuration issues will still throw exceptions.
-    graceful_exceptions: bool = True
-
-
-def get_lineage_config() -> DatahubLineageConfig:
-    """Load the lineage config from airflow.cfg."""
-
-    # The kwargs pattern is also used for secret backends.
-    kwargs_str = conf.get("lineage", "datahub_kwargs", fallback="{}")
-    kwargs = json.loads(kwargs_str)
-
-    # Continue to support top-level datahub_conn_id config.
-    datahub_conn_id = conf.get("lineage", "datahub_conn_id", fallback=None)
-    if datahub_conn_id:
-        kwargs["datahub_conn_id"] = datahub_conn_id
-
-    return DatahubLineageConfig.parse_obj(kwargs)
-
-
-class DatahubLineageBackend(LineageBackend):
-    """
-    Sends lineage data from tasks to DataHub.
-
-    Configurable via ``airflow.cfg`` as follows: ::
-
-        # For REST-based:
-        airflow connections add --conn-type 'datahub_rest' 'datahub_rest_default' --conn-host 'http://localhost:8080'
-        # For Kafka-based (standard Kafka sink config can be passed via extras):
-        airflow connections add --conn-type 'datahub_kafka' 'datahub_kafka_default' --conn-host 'broker:9092' --conn-extra '{}'
-
-        [lineage]
-        backend = datahub_provider.lineage.datahub.DatahubLineageBackend
-        datahub_kwargs = {
-            "datahub_conn_id": "datahub_rest_default",
-            "capture_ownership_info": true,
-            "capture_tags_info": true,
-            "graceful_exceptions": true }
-        # The above indentation is important!
-    """
-
-    def __init__(self) -> None:
-        super().__init__()
-
-        # By attempting to get and parse the config, we can detect configuration errors
-        # ahead of time. The init method is only called in Airflow 2.x.
-        _ = get_lineage_config()
-
-    # With Airflow 2.0, this can be an instance method. However, with Airflow 1.10.x, this
-    # method is used statically, even though LineageBackend declares it as an instance variable.
-    @staticmethod
-    def send_lineage(
-        operator: "BaseOperator",
-        inlets: Optional[List] = None,  # unused
-        outlets: Optional[List] = None,  # unused
-        context: Optional[Dict] = None,
-    ) -> None:
-        config = get_lineage_config()
-        if not config.enabled:
-            return
-
-        try:
-            context = context or {}  # ensure not None to satisfy mypy
-            send_lineage_to_datahub(
-                config, operator, operator.inlets, operator.outlets, context
-            )
-        except Exception as e:
-            if config.graceful_exceptions:
-                operator.log.error(e)
-                operator.log.info(
-                    "Suppressing error because graceful_exceptions is set"
-                )
-            else:
-                raise
+__all__ = ["DatahubLineageBackend", "DatahubLineageConfig"]
diff --git a/metadata-ingestion/src/datahub_provider/operators/datahub.py b/metadata-ingestion/src/datahub_provider/operators/datahub.py
index cd1d5187e6d85..08b1807cd4614 100644
--- a/metadata-ingestion/src/datahub_provider/operators/datahub.py
+++ b/metadata-ingestion/src/datahub_provider/operators/datahub.py
@@ -1,63 +1,6 @@
-from typing import List, Union
-
-from airflow.models import BaseOperator
-from airflow.utils.decorators import apply_defaults
-
-from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
-from datahub_provider.hooks.datahub import (
-    DatahubGenericHook,
-    DatahubKafkaHook,
-    DatahubRestHook,
+from datahub_airflow_plugin.operators.datahub import (
+    DatahubBaseOperator,
+    DatahubEmitterOperator,
 )
 
-
-class DatahubBaseOperator(BaseOperator):
-    """
-    The DatahubBaseOperator is used as a base operator all DataHub operators.
-    """
-
-    ui_color = "#4398c8"
-
-    hook: Union[DatahubRestHook, DatahubKafkaHook]
-
-    # mypy is not a fan of this. Newer versions of Airflow support proper typing for the decorator
-    # using PEP 612. However, there is not yet a good way to inherit the types of the kwargs from
-    # the superclass.
-    @apply_defaults  # type: ignore[misc]
-    def __init__(  # type: ignore[no-untyped-def]
-        self,
-        *,
-        datahub_conn_id: str,
-        **kwargs,
-    ):
-        super().__init__(**kwargs)
-
-        self.datahub_conn_id = datahub_conn_id
-        self.generic_hook = DatahubGenericHook(datahub_conn_id)
-
-
-class DatahubEmitterOperator(DatahubBaseOperator):
-    """
-    Emits a Metadata Change Event to DataHub using either a DataHub
-    Rest or Kafka connection.
-
-    :param datahub_conn_id: Reference to the DataHub Rest or Kafka Connection.
-    :type datahub_conn_id: str
-    """
-
-    # See above for why these mypy type issues are ignored here.
-    @apply_defaults  # type: ignore[misc]
-    def __init__(  # type: ignore[no-untyped-def]
-        self,
-        mces: List[MetadataChangeEvent],
-        datahub_conn_id: str,
-        **kwargs,
-    ):
-        super().__init__(
-            datahub_conn_id=datahub_conn_id,
-            **kwargs,
-        )
-        self.mces = mces
-
-    def execute(self, context):
-        self.generic_hook.get_underlying_hook().emit_mces(self.mces)
+__all__ = ["DatahubEmitterOperator", "DatahubBaseOperator"]
diff --git a/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_operator.py b/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_operator.py
index 28be8ad860179..85469c10f271c 100644
--- a/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_operator.py
+++ b/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_operator.py
@@ -1,78 +1,5 @@
-import datetime
-from typing import Any, List, Optional, Sequence, Union
-
-from airflow.models import BaseOperator
-
-from datahub.api.circuit_breaker import (
-    AssertionCircuitBreaker,
-    AssertionCircuitBreakerConfig,
+from datahub_airflow_plugin.operators.datahub_assertion_operator import (
+    DataHubAssertionOperator,
 )
-from datahub_provider.hooks.datahub import DatahubRestHook
-
-
-class DataHubAssertionOperator(BaseOperator):
-    r"""
-    DataHub Assertion Circuit Breaker Operator.
-
-    :param urn: The DataHub dataset unique identifier. (templated)
-    :param datahub_rest_conn_id: The REST datahub connection id to communicate with DataHub
-        which is set as Airflow connection.
-    :param check_last_assertion_time: If set it checks assertions after the last operation was set on the dataset.
-        By default it is True.
-    :param time_delta: If verify_after_last_update is False it checks for assertion within the time delta.
-    """
-
-    template_fields: Sequence[str] = ("urn",)
-    circuit_breaker: AssertionCircuitBreaker
-    urn: Union[List[str], str]
-
-    def __init__(  # type: ignore[no-untyped-def]
-        self,
-        *,
-        urn: Union[List[str], str],
-        datahub_rest_conn_id: Optional[str] = None,
-        check_last_assertion_time: bool = True,
-        time_delta: Optional[datetime.timedelta] = None,
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-        hook: DatahubRestHook
-        if datahub_rest_conn_id is not None:
-            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
-        else:
-            hook = DatahubRestHook()
-
-        host, password, timeout_sec = hook._get_config()
-        self.urn = urn
-        config: AssertionCircuitBreakerConfig = AssertionCircuitBreakerConfig(
-            datahub_host=host,
-            datahub_token=password,
-            timeout=timeout_sec,
-            verify_after_last_update=check_last_assertion_time,
-            time_delta=time_delta if time_delta else datetime.timedelta(days=1),
-        )
-
-        self.circuit_breaker = AssertionCircuitBreaker(config=config)
-
-    def execute(self, context: Any) -> bool:
-        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
-            self.log.info(
-                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
-            )
-            return True
-
-        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-        if isinstance(self.urn, str):
-            urns = [self.urn]
-        elif isinstance(self.urn, list):
-            urns = self.urn
-        else:
-            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
-
-        for urn in urns:
-            self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-            ret = self.circuit_breaker.is_circuit_breaker_active(urn=urn)
-            if ret:
-                raise Exception(f"Dataset {self.urn} is not in consumable state")
-        return True
+__all__ = ["DataHubAssertionOperator"]
diff --git a/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_sensor.py b/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_sensor.py
index ceb970dd8dc7f..e560ecb6145e0 100644
--- a/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_sensor.py
+++ b/metadata-ingestion/src/datahub_provider/operators/datahub_assertion_sensor.py
@@ -1,78 +1,5 @@
-import datetime
-from typing import Any, List, Optional, Sequence, Union
-
-from airflow.sensors.base import BaseSensorOperator
-
-from datahub.api.circuit_breaker import (
-    AssertionCircuitBreaker,
-    AssertionCircuitBreakerConfig,
+from datahub_airflow_plugin.operators.datahub_assertion_sensor import (
+    DataHubAssertionSensor,
 )
-from datahub_provider.hooks.datahub import DatahubRestHook
-
-
-class DataHubAssertionSensor(BaseSensorOperator):
-    r"""
-    DataHub Assertion Circuit Breaker Sensor.
-
-    :param urn: The DataHub dataset unique identifier. (templated)
-    :param datahub_rest_conn_id: The REST datahub connection id to communicate with DataHub
-        which is set as Airflow connection.
-    :param check_last_assertion_time: If set it checks assertions after the last operation was set on the dataset.
-        By default it is True.
-    :param time_delta: If verify_after_last_update is False it checks for assertion within the time delta.
-    """
-
-    template_fields: Sequence[str] = ("urn",)
-    circuit_breaker: AssertionCircuitBreaker
-    urn: Union[List[str], str]
-
-    def __init__(  # type: ignore[no-untyped-def]
-        self,
-        *,
-        urn: Union[List[str], str],
-        datahub_rest_conn_id: Optional[str] = None,
-        check_last_assertion_time: bool = True,
-        time_delta: datetime.timedelta = datetime.timedelta(days=1),
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-        hook: DatahubRestHook
-        if datahub_rest_conn_id is not None:
-            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
-        else:
-            hook = DatahubRestHook()
-
-        host, password, timeout_sec = hook._get_config()
-        self.urn = urn
-        config: AssertionCircuitBreakerConfig = AssertionCircuitBreakerConfig(
-            datahub_host=host,
-            datahub_token=password,
-            timeout=timeout_sec,
-            verify_after_last_update=check_last_assertion_time,
-            time_delta=time_delta,
-        )
-        self.circuit_breaker = AssertionCircuitBreaker(config=config)
-
-    def poke(self, context: Any) -> bool:
-        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
-            self.log.info(
-                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
-            )
-            return True
-
-        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-        if isinstance(self.urn, str):
-            urns = [self.urn]
-        elif isinstance(self.urn, list):
-            urns = self.urn
-        else:
-            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
-
-        for urn in urns:
-            self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-            ret = self.circuit_breaker.is_circuit_breaker_active(urn=urn)
-            if ret:
-                self.log.info(f"Dataset {self.urn} is not in consumable state")
-                return False
-        return True
+__all__ = ["DataHubAssertionSensor"]
diff --git a/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py b/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py
index 6b2535994c101..6107e70c9eddd 100644
--- a/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py
+++ b/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py
@@ -1,97 +1,5 @@
-import datetime
-from typing import Any, List, Optional, Sequence, Union
-
-from airflow.sensors.base import BaseSensorOperator
-
-from datahub.api.circuit_breaker import (
-    OperationCircuitBreaker,
-    OperationCircuitBreakerConfig,
+from datahub_airflow_plugin.operators.datahub_operation_operator import (
+    DataHubOperationCircuitBreakerOperator,
 )
-from datahub_provider.hooks.datahub import DatahubRestHook
-
-
-class DataHubOperationCircuitBreakerOperator(BaseSensorOperator):
-    r"""
-    DataHub Operation Circuit Breaker Operator.
-
-    :param urn: The DataHub dataset unique identifier. (templated)
-    :param datahub_rest_conn_id: The REST datahub connection id to communicate with DataHub
-        which is set as Airflow connection.
-    :param partition: The partition to check the operation.
-    :param source_type: The partition to check the operation. :ref:`https://datahubproject.io/docs/graphql/enums#operationsourcetype`
-
-    """
-
-    template_fields: Sequence[str] = (
-        "urn",
-        "partition",
-        "source_type",
-        "operation_type",
-    )
-    circuit_breaker: OperationCircuitBreaker
-    urn: Union[List[str], str]
-    partition: Optional[str]
-    source_type: Optional[str]
-    operation_type: Optional[str]
-
-    def __init__(  # type: ignore[no-untyped-def]
-        self,
-        *,
-        urn: Union[List[str], str],
-        datahub_rest_conn_id: Optional[str] = None,
-        time_delta: Optional[datetime.timedelta] = datetime.timedelta(days=1),
-        partition: Optional[str] = None,
-        source_type: Optional[str] = None,
-        operation_type: Optional[str] = None,
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-        hook: DatahubRestHook
-        if datahub_rest_conn_id is not None:
-            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
-        else:
-            hook = DatahubRestHook()
-
-        host, password, timeout_sec = hook._get_config()
-
-        self.urn = urn
-        self.partition = partition
-        self.operation_type = operation_type
-        self.source_type = source_type
-
-        config: OperationCircuitBreakerConfig = OperationCircuitBreakerConfig(
-            datahub_host=host,
-            datahub_token=password,
-            timeout=timeout_sec,
-            time_delta=time_delta,
-        )
-
-        self.circuit_breaker = OperationCircuitBreaker(config=config)
-
-    def execute(self, context: Any) -> bool:
-        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
-            self.log.info(
-                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
-            )
-            return True
-
-        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-        if isinstance(self.urn, str):
-            urns = [self.urn]
-        elif isinstance(self.urn, list):
-            urns = self.urn
-        else:
-            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
-
-        for urn in urns:
-            self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-            ret = self.circuit_breaker.is_circuit_breaker_active(
-                urn=urn,
-                partition=self.partition,
-                operation_type=self.operation_type,
-                source_type=self.source_type,
-            )
-            if ret:
-                raise Exception(f"Dataset {self.urn} is not in consumable state")
-        return True
+__all__ = ["DataHubOperationCircuitBreakerOperator"]
diff --git a/metadata-ingestion/src/datahub_provider/operators/datahub_operation_sensor.py b/metadata-ingestion/src/datahub_provider/operators/datahub_operation_sensor.py
index 8796215453500..902a342081490 100644
--- a/metadata-ingestion/src/datahub_provider/operators/datahub_operation_sensor.py
+++ b/metadata-ingestion/src/datahub_provider/operators/datahub_operation_sensor.py
@@ -1,100 +1,5 @@
-import datetime
-from typing import Any, List, Optional, Sequence, Union
-
-from airflow.sensors.base import BaseSensorOperator
-
-from datahub.api.circuit_breaker import (
-    OperationCircuitBreaker,
-    OperationCircuitBreakerConfig,
+from datahub_airflow_plugin.operators.datahub_operation_sensor import (
+    DataHubOperationCircuitBreakerSensor,
 )
-from datahub_provider.hooks.datahub import DatahubRestHook
-
-
-class DataHubOperationCircuitBreakerSensor(BaseSensorOperator):
-    r"""
-    DataHub Operation Circuit Breaker Sensor.
-
-    :param urn: The DataHub dataset unique identifier. (templated)
-    :param datahub_rest_conn_id: The REST datahub connection id to communicate with DataHub
-        which is set as Airflow connection.
-    :param partition: The partition to check the operation.
-    :param source_type: The source type to filter on. If not set it will accept any source type.
-        See valid values at: https://datahubproject.io/docs/graphql/enums#operationsourcetype
-    :param operation_type: The operation type to filter on. If not set it will accept any source type.
-        See valid values at: https://datahubproject.io/docs/graphql/enums/#operationtype
-    """
-
-    template_fields: Sequence[str] = (
-        "urn",
-        "partition",
-        "source_type",
-        "operation_type",
-    )
-    circuit_breaker: OperationCircuitBreaker
-    urn: Union[List[str], str]
-    partition: Optional[str]
-    source_type: Optional[str]
-    operation_type: Optional[str]
-
-    def __init__(  # type: ignore[no-untyped-def]
-        self,
-        *,
-        urn: Union[List[str], str],
-        datahub_rest_conn_id: Optional[str] = None,
-        time_delta: Optional[datetime.timedelta] = datetime.timedelta(days=1),
-        partition: Optional[str] = None,
-        source_type: Optional[str] = None,
-        operation_type: Optional[str] = None,
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-        hook: DatahubRestHook
-        if datahub_rest_conn_id is not None:
-            hook = DatahubRestHook(datahub_rest_conn_id=datahub_rest_conn_id)
-        else:
-            hook = DatahubRestHook()
-
-        host, password, timeout_sec = hook._get_config()
-
-        self.urn = urn
-        self.partition = partition
-        self.operation_type = operation_type
-        self.source_type = source_type
-
-        config: OperationCircuitBreakerConfig = OperationCircuitBreakerConfig(
-            datahub_host=host,
-            datahub_token=password,
-            timeout=timeout_sec,
-            time_delta=time_delta,
-        )
-
-        self.circuit_breaker = OperationCircuitBreaker(config=config)
-
-    def poke(self, context: Any) -> bool:
-        if "datahub_silence_circuit_breakers" in context["dag_run"].conf:
-            self.log.info(
-                "Circuit breaker is silenced because datahub_silence_circuit_breakers config is set"
-            )
-            return True
-
-        self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-        if isinstance(self.urn, str):
-            urns = [self.urn]
-        elif isinstance(self.urn, list):
-            urns = self.urn
-        else:
-            raise Exception(f"urn parameter has invalid type {type(self.urn)}")
-
-        for urn in urns:
-            self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
-            ret = self.circuit_breaker.is_circuit_breaker_active(
-                urn=urn,
-                partition=self.partition,
-                operation_type=self.operation_type,
-                source_type=self.source_type,
-            )
-            if ret:
-                self.log.info(f"Dataset {self.urn} is not in consumable state")
-                return False
-        return True
+__all__ = ["DataHubOperationCircuitBreakerSensor"]

From 4c69f9a1d688dd8c57c2e55ff40a74a4509e9487 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Wed, 30 Aug 2023 16:36:05 -0700
Subject: [PATCH 25/41] fix(ingest/athena): fix container linting (#8761)

---
 metadata-ingestion/src/datahub/ingestion/source/sql/athena.py | 4 ++--
 .../src/datahub/ingestion/source/sql/sql_utils.py             | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
index 53501b9a536ee..9cb613bde1e9f 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
@@ -9,7 +9,7 @@
 from sqlalchemy.engine.reflection import Inspector
 
 from datahub.configuration.validate_field_rename import pydantic_renamed_field
-from datahub.emitter.mcp_builder import ContainerKey
+from datahub.emitter.mcp_builder import ContainerKey, DatabaseKey
 from datahub.ingestion.api.decorators import (
     SourceCapability,
     SupportStatus,
@@ -205,7 +205,7 @@ def gen_schema_containers(
             extra_properties=extra_properties,
         )
 
-    def get_database_container_key(self, db_name: str, schema: str) -> ContainerKey:
+    def get_database_container_key(self, db_name: str, schema: str) -> DatabaseKey:
         # Because our overridden get_allowed_schemas method returns db_name as the schema name,
         # the db_name and schema here will be the same. Hence, we just ignore the schema parameter.
         # Based on community feedback, db_name only available if it is explicitly specified in the connection string.
diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_utils.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_utils.py
index c5baf148b0e5e..723a8c5fd8669 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_utils.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_utils.py
@@ -35,7 +35,7 @@ def gen_schema_key(
     platform: str,
     platform_instance: Optional[str],
     env: Optional[str],
-) -> ContainerKey:
+) -> SchemaKey:
     return SchemaKey(
         database=db_name,
         schema=schema,
@@ -48,7 +48,7 @@ def gen_schema_key(
 
 def gen_database_key(
     database: str, platform: str, platform_instance: Optional[str], env: Optional[str]
-) -> ContainerKey:
+) -> DatabaseKey:
     return DatabaseKey(
         database=database,
         platform=platform,

From e7d140f82df393b98821f8a49e74ae3995a01ead Mon Sep 17 00:00:00 2001
From: Raj Tekal
Date: Thu, 31 Aug 2023 00:02:03 -0400
Subject: [PATCH 26/41] fix(datahub-frontend) Give permission for start.sh so
 it can run (#8594)

---
 docker/datahub-frontend/Dockerfile | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile
index 23c04972209ed..9efc0d2ce8753 100644
--- a/docker/datahub-frontend/Dockerfile
+++ b/docker/datahub-frontend/Dockerfile
@@ -29,6 +29,8 @@ FROM base as dev-install
 VOLUME [ "/datahub-frontend" ]
 
 FROM ${APP_ENV}-install as final
+COPY ./docker/datahub-frontend/start.sh /
+RUN chown datahub:datahub /start.sh && chmod 755 /start.sh
 USER datahub
 
 ARG SERVER_PORT=9002
@@ -37,5 +39,4 @@ RUN echo $SERVER_PORT
 EXPOSE $SERVER_PORT
 HEALTHCHECK --start-period=2m --retries=4 CMD curl --fail http://localhost:$SERVER_PORT/admin || exit 1
 
-COPY ./docker/datahub-frontend/start.sh /
 CMD ./start.sh

From 21b2851be7207106e3cff7fe995a1475346f942d Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Thu, 31 Aug 2023 09:43:39 -0700
Subject: [PATCH 27/41] feat(sql-parser): schema-aware output column casing
 (#8760)

Co-authored-by: Tamas Nemeth
---
 .../src/datahub/utilities/sqlglot_lineage.py  | 41 ++++++-----
 .../test_expand_select_star_basic.json        | 20 +++---
 .../goldens/test_select_from_union.json       |  4 +-
 .../test_snowflake_case_statement.json        |  4 +-
 .../test_snowflake_column_normalization.json  |  8 +--
 ...t_snowflake_ctas_column_normalization.json | 59 +++++++++++++++
 .../test_snowflake_default_normalization.json | 12 ++--
 .../unit/sql_parsing/test_sqlglot_lineage.py  | 34 +++++++++++
 8 files changed, 144 insertions(+), 38 deletions(-)
 create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_ctas_column_normalization.json

diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
index 534cac5cef2aa..d677b0874b985 100644
--- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
+++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
@@ -459,6 +459,19 @@ def _sqlglot_force_column_normalizer(
     #         statement.sql(pretty=True, dialect=dialect),
     #     )
 
+    def _schema_aware_fuzzy_column_resolve(
+        table: Optional[_TableName], sqlglot_column: str
+    ) -> str:
+        default_col_name = (
+            sqlglot_column.lower() if use_case_insensitive_cols else sqlglot_column
+        )
+        if table:
+            return table_schema_normalized_mapping[table].get(
+                sqlglot_column, default_col_name
+            )
+        else:
+            return default_col_name
+
     # Optimize the statement + qualify column references.
     logger.debug(
         "Prior to qualification sql %s", statement.sql(pretty=True, dialect=dialect)
@@ -540,10 +553,8 @@ def _sqlglot_force_column_normalizer(
                 normalized_col = sqlglot.parse_one(node.name).this.name
                 if node.subfield:
                     normalized_col = f"{normalized_col}.{node.subfield}"
-                col = table_schema_normalized_mapping[table_ref].get(
-                    normalized_col, normalized_col
-                )
 
+                col = _schema_aware_fuzzy_column_resolve(table_ref, normalized_col)
                 direct_col_upstreams.add(_ColumnRef(table=table_ref, column=col))
             else:
                 # This branch doesn't matter. For example, a count(*) column would go here, and
@@ -557,6 +568,9 @@ def _sqlglot_force_column_normalizer(
         # This is a bit jank since we're relying on sqlglot internals, but it seems to be
         # the best way to do it.
         output_col = original_col_expression.this.sql(dialect=dialect)
+
+        output_col = _schema_aware_fuzzy_column_resolve(output_table, output_col)
+
         if not direct_col_upstreams:
             logger.debug(f'  "{output_col}" has no upstreams')
         column_lineage.append(
@@ -699,10 +713,7 @@ def _sqlglot_lineage_inner(
     # Fetch schema info for the relevant tables.
     table_name_urn_mapping: Dict[_TableName, str] = {}
     table_name_schema_mapping: Dict[_TableName, SchemaInfo] = {}
-    for table, is_input in itertools.chain(
-        [(table, True) for table in tables],
-        [(table, False) for table in modified],
-    ):
+    for table in itertools.chain(tables, modified):
         # For select statements, qualification will be a no-op. For other statements, this
         # is where the qualification actually happens.
         qualified_table = table.qualified(
@@ -712,19 +723,21 @@ def _sqlglot_lineage_inner(
         urn, schema_info = schema_resolver.resolve_table(qualified_table)
 
         table_name_urn_mapping[qualified_table] = urn
-        if is_input and schema_info:
+        if schema_info:
            table_name_schema_mapping[qualified_table] = schema_info
 
         # Also include the original, non-qualified table name in the urn mapping.
         table_name_urn_mapping[table] = urn
 
+    total_tables_discovered = len(tables) + len(modified)
+    total_schemas_resolved = len(table_name_schema_mapping)
     debug_info = SqlParsingDebugInfo(
-        confidence=0.9 if len(tables) == len(table_name_schema_mapping)
+        confidence=0.9 if total_tables_discovered == total_schemas_resolved
         # If we're missing any schema info, our confidence will be in the 0.2-0.5 range depending
         # on how many tables we were able to resolve.
-        else 0.2 + 0.3 * len(table_name_schema_mapping) / len(tables),
-        tables_discovered=len(tables),
-        table_schemas_resolved=len(table_name_schema_mapping),
+        else 0.2 + 0.3 * total_schemas_resolved / total_tables_discovered,
+        tables_discovered=total_tables_discovered,
+        table_schemas_resolved=total_schemas_resolved,
     )
     logger.debug(
         f"Resolved {len(table_name_schema_mapping)} of {len(tables)} table schemas"
@@ -789,7 +802,8 @@ def sqlglot_lineage(
     This is a schema-aware lineage generator, meaning that it will use the
     schema information for the tables involved to generate lineage information
     for the columns involved. The schema_resolver is responsible for providing
-    the table schema information.
+    the table schema information. In most cases, the DataHubGraph can be used
+    to construct a schema_resolver that will fetch schemas from DataHub.
 
     The parser supports most types of DML statements (SELECT, INSERT, UPDATE,
     DELETE, MERGE) as well as CREATE TABLE AS SELECT (CTAS) statements. It
@@ -859,7 +873,6 @@ def create_lineage_sql_parsed_result(
     schema: Optional[str] = None,
     graph: Optional[DataHubGraph] = None,
 ) -> Optional["SqlParsingResult"]:
-
     parsed_result: Optional["SqlParsingResult"] = None
     try:
         schema_resolver = (
diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_expand_select_star_basic.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_expand_select_star_basic.json
index e456e4450c50a..e241bdd08e243 100644
--- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_expand_select_star_basic.json
+++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_expand_select_star_basic.json
@@ -8,7 +8,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_AGG"
+        "column": "total_agg"
       },
       "upstreams": [
         {
@@ -20,7 +20,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "ORDERKEY"
+        "column": "orderkey"
       },
       "upstreams": [
         {
@@ -32,7 +32,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "CUSTKEY"
+        "column": "custkey"
       },
       "upstreams": [
         {
@@ -44,7 +44,7 @@
     {
      "downstream": {
        "table": null,
-        "column": "ORDERSTATUS"
+        "column": "orderstatus"
       },
       "upstreams": [
         {
@@ -56,7 +56,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTALPRICE"
+        "column": "totalprice"
       },
       "upstreams": [
         {
@@ -68,7 +68,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "ORDERDATE"
+        "column": "orderdate"
       },
       "upstreams": [
         {
@@ -80,7 +80,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "ORDERPRIORITY"
+        "column": "orderpriority"
       },
       "upstreams": [
         {
@@ -92,7 +92,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "CLERK"
+        "column": "clerk"
       },
       "upstreams": [
         {
@@ -104,7 +104,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "SHIPPRIORITY"
+        "column": "shippriority"
       },
       "upstreams": [
         {
@@ -116,7 +116,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "COMMENT"
+        "column": "comment"
       },
       "upstreams": [
         {
diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json
index 8e1fd453ce09d..2340b2e95b0d0 100644
--- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json
+++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_select_from_union.json
@@ -9,14 +9,14 @@
     {
       "downstream": {
        "table": null,
-        "column": "LABEL"
+        "column": "label"
       },
       "upstreams": []
     },
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_AGG"
+        "column": "total_agg"
       },
       "upstreams": [
         {
diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_case_statement.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_case_statement.json
index 7d1a4f2039b10..64cd80e9a2d69 100644
--- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_case_statement.json
+++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_case_statement.json
@@ -8,7 +8,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_PRICE_CATEGORY"
+        "column": "total_price_category"
       },
       "upstreams": [
         {
@@ -20,7 +20,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_PRICE_SUCCESS"
+        "column": "total_price_success"
       },
       "upstreams": [
         {
diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_column_normalization.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_column_normalization.json
index 694bec3800dbf..7b22a46757e39 100644
--- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_column_normalization.json
+++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_column_normalization.json
@@ -8,7 +8,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_AGG"
+        "column": "total_agg"
       },
       "upstreams": [
         {
@@ -20,7 +20,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_AVG"
+        "column": "total_avg"
       },
       "upstreams": [
         {
@@ -32,7 +32,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_MIN"
+        "column": "total_min"
       },
       "upstreams": [
         {
@@ -44,7 +44,7 @@
     {
       "downstream": {
        "table": null,
-        "column": "TOTAL_MAX"
+        "column": "total_max"
       },
       "upstreams": [
         {
diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_ctas_column_normalization.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_ctas_column_normalization.json
new file mode 100644
index 0000000000000..c912d99a3a8a3
--- /dev/null
+++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_ctas_column_normalization.json
@@ -0,0 +1,59 @@
+{
+  "query_type": "CREATE",
+  "in_tables": [
+    "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)"
+  ],
+  "out_tables": [
+    "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders_normalized,PROD)"
+  ],
+  "column_lineage": [
+    {
+      "downstream": {
+        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders_normalized,PROD)",
+        "column": "Total_Agg"
+      },
+      "upstreams": [
+        {
+          "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)",
+          "column": "TotalPrice"
+        }
+      ]
+    },
+    {
+      "downstream": {
+        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders_normalized,PROD)",
+        "column": "total_avg"
+      },
+      "upstreams": [
+        {
+          "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)",
+          "column": "TotalPrice"
+        }
+      ]
+    },
+    {
+      "downstream": {
+        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders_normalized,PROD)",
+        "column": "TOTAL_MIN"
+      },
+      "upstreams": [
+        {
+          "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)",
+          "column": "TotalPrice"
+        }
+      ]
+    },
+    {
+      "downstream": {
+        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders_normalized,PROD)",
+        "column": "total_max"
+      },
+      "upstreams": [
+        {
+          "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)",
+          "column": "TotalPrice"
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_default_normalization.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_default_normalization.json
index 157745854128f..2af308ec60623 100644
--- a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_default_normalization.json
+++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_snowflake_default_normalization.json
@@ -11,7 +11,7 @@
     {
       "downstream": {
        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.active_customer_ltv,PROD)",
-        "column": "USER_FK"
+        "column": "user_fk"
       },
       "upstreams": [
         {
@@ -23,7 +23,7 @@
     {
       "downstream": {
        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.active_customer_ltv,PROD)",
-        "column": "EMAIL"
+        "column": "email"
       },
       "upstreams": [
         {
@@ -35,7 +35,7 @@
     {
       "downstream": {
        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.active_customer_ltv,PROD)",
-        "column": "LAST_PURCHASE_DATE"
+        "column": "last_purchase_date"
       },
       "upstreams": [
         {
@@ -47,7 +47,7 @@
     {
       "downstream": {
        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.active_customer_ltv,PROD)",
-        "column": "LIFETIME_PURCHASE_AMOUNT"
+        "column": "lifetime_purchase_amount"
       },
       "upstreams": [
         {
@@ -59,7 +59,7 @@
     {
       "downstream": {
        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.active_customer_ltv,PROD)",
-        "column": "LIFETIME_PURCHASE_COUNT"
+        "column": "lifetime_purchase_count"
       },
       "upstreams": [
         {
@@ -71,7 +71,7 @@
     {
       "downstream": {
        "table": "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.active_customer_ltv,PROD)",
-        "column": "AVERAGE_PURCHASE_AMOUNT"
+        "column": "average_purchase_amount"
       },
       "upstreams": [
         {
diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py
index 5a294be150fa0..7581d3bac010e 100644
--- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py
+++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py
@@ -287,6 +287,40 @@ def test_snowflake_column_normalization():
     )
 
 
+def test_snowflake_ctas_column_normalization():
+    # For CTAS statements, we also should try to match the output table's
+    # column name casing. This is technically incorrect since we have the
+    # exact column names from the query, but necessary to match our column
+    # name normalization behavior in the Snowflake source.
+
+    assert_sql_result(
+        """
+CREATE TABLE snowflake_sample_data.tpch_sf1.orders_normalized
+AS
+SELECT
+    SUM(o."totalprice") as Total_Agg,
+    AVG("TotalPrice") as TOTAL_AVG,
+    MIN("TOTALPRICE") as TOTAL_MIN,
+    MAX(TotalPrice) as Total_Max
+FROM snowflake_sample_data.tpch_sf1.orders o
+""",
+        dialect="snowflake",
+        schemas={
+            "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders,PROD)": {
+                "orderkey": "NUMBER",
+                "TotalPrice": "FLOAT",
+            },
+            "urn:li:dataset:(urn:li:dataPlatform:snowflake,snowflake_sample_data.tpch_sf1.orders_normalized,PROD)": {
+                "Total_Agg": "FLOAT",
+                "total_avg": "FLOAT",
+                "TOTAL_MIN": "FLOAT",
+                # Purposely excluding total_max to test out the fallback behavior.
+ }, + }, + expected_file=RESOURCE_DIR / "test_snowflake_ctas_column_normalization.json", + ) + + def test_snowflake_case_statement(): assert_sql_result( """ From a4e726872b4de14d46648231ec1fd799e258cec2 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 31 Aug 2023 12:44:24 -0400 Subject: [PATCH 28/41] fix(ingest/bigquery): Filter out fine grained lineage with no upstreams (#8758) --- .../ingestion/source/bigquery_v2/lineage.py | 1 + .../tests/unit/test_bigquery_lineage.py | 50 ++++++++++++++++--- 2 files changed, 45 insertions(+), 6 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index 842e3d2144600..341952d95e7d7 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -183,6 +183,7 @@ def make_lineage_edges_from_parsing_result( column_mapping=frozenset( LineageEdgeColumnMapping(out_column=out_column, in_columns=in_columns) for out_column, in_columns in column_mapping.items() + if in_columns ), auditStamp=audit_stamp, type=lineage_type, diff --git a/metadata-ingestion/tests/unit/test_bigquery_lineage.py b/metadata-ingestion/tests/unit/test_bigquery_lineage.py index c9308fd89ef72..9b09fa36ba586 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_lineage.py +++ b/metadata-ingestion/tests/unit/test_bigquery_lineage.py @@ -1,6 +1,8 @@ import datetime from typing import Dict, List, Set +import pytest + from datahub.ingestion.source.bigquery_v2.bigquery_audit import ( BigQueryTableRef, QueryEvent, @@ -14,15 +16,17 @@ from datahub.utilities.sqlglot_lineage import SchemaResolver -def test_lineage_with_timestamps(): - config = BigQueryV2Config() - report = BigQueryV2Report() - extractor: BigqueryLineageExtractor = BigqueryLineageExtractor(config, report) - lineage_entries: List[QueryEvent] = [ +@pytest.fixture +def lineage_entries() -> List[QueryEvent]: + return [ QueryEvent( timestamp=datetime.datetime.now(tz=datetime.timezone.utc), actor_email="bla@bla.com", - query="testQuery", + query=""" + INSERT INTO `my_project.my_dataset.my_table` + SELECT first.a, second.b FROM `my_project.my_dataset.my_source_table1` first + LEFT JOIN `my_project.my_dataset.my_source_table2` second ON first.id = second.id + """, statementType="SELECT", project_id="proj_12344", end_time=None, @@ -73,6 +77,12 @@ def test_lineage_with_timestamps(): ), ] + +def test_lineage_with_timestamps(lineage_entries: List[QueryEvent]) -> None: + config = BigQueryV2Config() + report = BigQueryV2Report() + extractor: BigqueryLineageExtractor = BigqueryLineageExtractor(config, report) + bq_table = BigQueryTableRef.from_string_name( "projects/my_project/datasets/my_dataset/tables/my_table" ) @@ -90,3 +100,31 @@ def test_lineage_with_timestamps(): ) assert upstream_lineage assert len(upstream_lineage.upstreams) == 4 + + +def test_column_level_lineage(lineage_entries: List[QueryEvent]) -> None: + config = BigQueryV2Config(extract_column_lineage=True, incremental_lineage=False) + report = BigQueryV2Report() + extractor: BigqueryLineageExtractor = BigqueryLineageExtractor(config, report) + + bq_table = BigQueryTableRef.from_string_name( + "projects/my_project/datasets/my_dataset/tables/my_table" + ) + + lineage_map: Dict[str, Set[LineageEdge]] = extractor._create_lineage_map( + lineage_entries[:1], + sql_parser_schema_resolver=SchemaResolver(platform="bigquery"), + ) + + upstream_lineage = 
extractor.get_lineage_for_table( + bq_table=bq_table, + bq_table_urn="urn:li:dataset:(urn:li:dataPlatform:bigquery,my_project.my_dataset.my_table,PROD)", + lineage_metadata=lineage_map, + platform="bigquery", + ) + assert upstream_lineage + assert len(upstream_lineage.upstreams) == 2 + assert ( + upstream_lineage.fineGrainedLineages + and len(upstream_lineage.fineGrainedLineages) == 2 + ) From 6fe60a274efcc9568e4fd035d1230d9cfcf9990c Mon Sep 17 00:00:00 2001 From: cccs-eric Date: Thu, 31 Aug 2023 13:01:05 -0400 Subject: [PATCH 29/41] feat(iceberg): Upgrade Iceberg ingestion source to pyiceberg 0.4.0 (#8357) Co-authored-by: cccs-Dustin <96579982+cccs-Dustin@users.noreply.github.com> Co-authored-by: Fokko Driesprong Co-authored-by: Andrew Sikowitz --- .../docs/sources/iceberg/iceberg_recipe.yml | 19 +- metadata-ingestion/setup.py | 9 +- .../ingestion/source/azure/__init__.py | 0 .../ingestion/source/azure/azure_common.py | 88 -- .../ingestion/source/iceberg/iceberg.py | 398 +++++---- .../source/iceberg/iceberg_common.py | 144 +-- .../source/iceberg/iceberg_profiler.py | 157 ++-- .../tests/integration/iceberg/.gitignore | 3 + .../integration/iceberg/docker-compose.yml | 74 ++ .../iceberg_deleted_table_mces_golden.json | 184 ++++ .../iceberg/iceberg_ingest_mces_golden.json | 153 ++++ .../iceberg/iceberg_profile_mces_golden.json | 216 +++++ .../iceberg/iceberg_profile_to_file.yml | 25 + .../integration/iceberg/iceberg_to_file.yml | 22 + .../tests/integration/iceberg/setup/create.py | 46 + .../tests/integration/iceberg/setup/delete.py | 5 + .../ingest_test/iceberg_mces_golden.json | 131 --- .../iceberg_test/metadata/v1.metadata.json | 105 --- .../iceberg_test/metadata/v2.metadata.json | 118 --- .../iceberg_test/metadata/version-hint.text | 1 - ...-bb5c-4ffd-8ead-08f33fa2675d-00001.parquet | Bin 1089 -> 0 bytes ...-498a-4ce9-b525-8242758d18f8-00001.parquet | Bin 1088 -> 0 bytes ...-191f-4a11-9953-09435ffce01d-00001.parquet | Bin 1025 -> 0 bytes ...-b547-40b9-89ca-caf4fcfe6685-00001.parquet | Bin 1023 -> 0 bytes ...-e02b-44b1-8ec8-4dfa287c3bd5.metadata.json | 73 -- ...-5f25-4180-99e2-065ef0b9791b.metadata.json | 98 -- ...-4c12-46d0-9a75-ce3578ec03d4.metadata.json | 124 --- ...acaffc-9bed-4d97-8ddd-0ea1ea15a2b8-m0.avro | Bin 6032 -> 0 bytes ...0bd970-e5ef-4843-abcb-e96a35a8f14d-m0.avro | Bin 6036 -> 0 bytes ...-ec0bd970-e5ef-4843-abcb-e96a35a8f14d.avro | Bin 3839 -> 0 bytes ...-23acaffc-9bed-4d97-8ddd-0ea1ea15a2b8.avro | Bin 3771 -> 0 bytes .../profiling/metadata/version-hint.text | 1 - .../profiling_test/iceberg_mces_golden.json | 129 --- .../iceberg_deleted_table_mces_golden.json | 159 ---- .../iceberg_test/metadata/v1.metadata.json | 105 --- .../iceberg_test/metadata/v2.metadata.json | 118 --- .../iceberg_test/metadata/version-hint.text | 1 - .../iceberg_test_2/metadata/v1.metadata.json | 105 --- .../iceberg_test_2/metadata/v2.metadata.json | 118 --- .../iceberg_test_2/metadata/version-hint.text | 1 - .../iceberg_test/metadata/v1.metadata.json | 105 --- .../iceberg_test/metadata/v2.metadata.json | 118 --- .../iceberg_test/metadata/version-hint.text | 1 - .../tests/integration/iceberg/test_iceberg.py | 316 +++---- metadata-ingestion/tests/unit/test_iceberg.py | 837 ++++++++++-------- 45 files changed, 1653 insertions(+), 2654 deletions(-) delete mode 100644 metadata-ingestion/src/datahub/ingestion/source/azure/__init__.py delete mode 100644 metadata-ingestion/src/datahub/ingestion/source/azure/azure_common.py create mode 100644 metadata-ingestion/tests/integration/iceberg/.gitignore create mode 
100644 metadata-ingestion/tests/integration/iceberg/docker-compose.yml create mode 100644 metadata-ingestion/tests/integration/iceberg/iceberg_deleted_table_mces_golden.json create mode 100644 metadata-ingestion/tests/integration/iceberg/iceberg_ingest_mces_golden.json create mode 100644 metadata-ingestion/tests/integration/iceberg/iceberg_profile_mces_golden.json create mode 100644 metadata-ingestion/tests/integration/iceberg/iceberg_profile_to_file.yml create mode 100644 metadata-ingestion/tests/integration/iceberg/iceberg_to_file.yml create mode 100644 metadata-ingestion/tests/integration/iceberg/setup/create.py create mode 100644 metadata-ingestion/tests/integration/iceberg/setup/delete.py delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/iceberg_mces_golden.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v1.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v2.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/version-hint.text delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00000-0-72133c37-bb5c-4ffd-8ead-08f33fa2675d-00001.parquet delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00000-3-c638dd0f-498a-4ce9-b525-8242758d18f8-00001.parquet delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00001-1-5f69f6ed-191f-4a11-9953-09435ffce01d-00001.parquet delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00001-4-b21a5375-b547-40b9-89ca-caf4fcfe6685-00001.parquet delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/00000-331b9f67-e02b-44b1-8ec8-4dfa287c3bd5.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/00001-fb50681e-5f25-4180-99e2-065ef0b9791b.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/00002-cc241948-4c12-46d0-9a75-ce3578ec03d4.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/23acaffc-9bed-4d97-8ddd-0ea1ea15a2b8-m0.avro delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/ec0bd970-e5ef-4843-abcb-e96a35a8f14d-m0.avro delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/snap-4220723025353071767-1-ec0bd970-e5ef-4843-abcb-e96a35a8f14d.avro delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/snap-4437197002876030991-1-23acaffc-9bed-4d97-8ddd-0ea1ea15a2b8.avro delete mode 100755 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/version-hint.text delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/iceberg_mces_golden.json delete mode 100644 
metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/iceberg_deleted_table_mces_golden.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v1.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v2.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/version-hint.text delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v1.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v2.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/version-hint.text delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v1.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v2.metadata.json delete mode 100644 metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/version-hint.text diff --git a/metadata-ingestion/docs/sources/iceberg/iceberg_recipe.yml b/metadata-ingestion/docs/sources/iceberg/iceberg_recipe.yml index 28bce8a478211..8caedafbea50e 100644 --- a/metadata-ingestion/docs/sources/iceberg/iceberg_recipe.yml +++ b/metadata-ingestion/docs/sources/iceberg/iceberg_recipe.yml @@ -2,14 +2,17 @@ source: type: "iceberg" config: env: PROD - adls: - # Will be translated to https://{account_name}.dfs.core.windows.net - account_name: my_adls_account - # Can use sas_token or account_key - sas_token: "${SAS_TOKEN}" - # account_key: "${ACCOUNT_KEY}" - container_name: warehouse - base_path: iceberg + catalog: + name: my_iceberg_catalog + type: rest + # Catalog configuration follows pyiceberg's documentation (https://py.iceberg.apache.org/configuration) + config: + uri: http://localhost:8181 + s3.access-key-id: admin + s3.secret-access-key: password + s3.region: us-east-1 + warehouse: s3a://warehouse/wh/ + s3.endpoint: http://localhost:9000 platform_instance: my_iceberg_catalog table_pattern: allow: diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index aa01882a44aa6..32e1cf926cc68 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -229,8 +229,8 @@ def get_long_description(): iceberg_common = { # Iceberg Python SDK - "acryl-iceberg-legacy==0.0.4", - "azure-identity==1.10.0", + "pyiceberg", + "pyarrow>=9.0.0, <13.0.0", } s3_base = { @@ -477,7 +477,7 @@ def get_long_description(): "druid", "elasticsearch", "feast" if sys.version_info >= (3, 8) else None, - "iceberg", + "iceberg" if sys.version_info >= (3, 8) else None, "json-schema", "ldap", "looker", @@ -530,7 +530,7 @@ def get_long_description(): "druid", "hana", "hive", - "iceberg", + "iceberg" if sys.version_info >= (3, 8) else None, "kafka-connect", "ldap", "mongodb", @@ -540,6 +540,7 @@ def get_long_description(): "redash", "vertica", ] + if plugin for dependency in plugins[plugin] ), } diff --git a/metadata-ingestion/src/datahub/ingestion/source/azure/__init__.py b/metadata-ingestion/src/datahub/ingestion/source/azure/__init__.py deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git 
a/metadata-ingestion/src/datahub/ingestion/source/azure/azure_common.py b/metadata-ingestion/src/datahub/ingestion/source/azure/azure_common.py deleted file mode 100644 index 1a48725330df9..0000000000000 --- a/metadata-ingestion/src/datahub/ingestion/source/azure/azure_common.py +++ /dev/null @@ -1,88 +0,0 @@ -from typing import Dict, Optional, Union - -from azure.identity import ClientSecretCredential -from azure.storage.filedatalake import DataLakeServiceClient, FileSystemClient -from pydantic import Field, root_validator - -from datahub.configuration import ConfigModel -from datahub.configuration.common import ConfigurationError - - -class AdlsSourceConfig(ConfigModel): - """ - Common Azure credentials config. - - https://docs.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-directory-file-acl-python - """ - - base_path: str = Field( - default="/", - description="Base folder in hierarchical namespaces to start from.", - ) - container_name: str = Field( - description="Azure storage account container name.", - ) - account_name: str = Field( - description="Name of the Azure storage account. See [Microsoft official documentation on how to create a storage account.](https://docs.microsoft.com/en-us/azure/storage/blobs/create-data-lake-storage-account)", - ) - account_key: Optional[str] = Field( - description="Azure storage account access key that can be used as a credential. **An account key, a SAS token or a client secret is required for authentication.**", - default=None, - ) - sas_token: Optional[str] = Field( - description="Azure storage account Shared Access Signature (SAS) token that can be used as a credential. **An account key, a SAS token or a client secret is required for authentication.**", - default=None, - ) - client_secret: Optional[str] = Field( - description="Azure client secret that can be used as a credential. 
**An account key, a SAS token or a client secret is required for authentication.**", - default=None, - ) - client_id: Optional[str] = Field( - description="Azure client (Application) ID required when a `client_secret` is used as a credential.", - default=None, - ) - tenant_id: Optional[str] = Field( - description="Azure tenant (Directory) ID required when a `client_secret` is used as a credential.", - default=None, - ) - - def get_abfss_url(self, folder_path: str = "") -> str: - if not folder_path.startswith("/"): - folder_path = f"/{folder_path}" - return f"abfss://{self.container_name}@{self.account_name}.dfs.core.windows.net{folder_path}" - - def get_filesystem_client(self) -> FileSystemClient: - return self.get_service_client().get_file_system_client(self.container_name) - - def get_service_client(self) -> DataLakeServiceClient: - return DataLakeServiceClient( - account_url=f"https://{self.account_name}.dfs.core.windows.net", - credential=self.get_credentials(), - ) - - def get_credentials( - self, - ) -> Union[Optional[str], ClientSecretCredential]: - if self.client_id and self.client_secret and self.tenant_id: - return ClientSecretCredential( - tenant_id=self.tenant_id, - client_id=self.client_id, - client_secret=self.client_secret, - ) - return self.sas_token if self.sas_token is not None else self.account_key - - @root_validator() - def _check_credential_values(cls, values: Dict) -> Dict: - if ( - values.get("account_key") - or values.get("sas_token") - or ( - values.get("client_id") - and values.get("client_secret") - and values.get("tenant_id") - ) - ): - return values - raise ConfigurationError( - "credentials missing, requires one combination of account_key or sas_token or (client_id and client_secret and tenant_id)" - ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py index b7ae50eb766af..cc7f646dcb884 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py +++ b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg.py @@ -1,14 +1,37 @@ +import sys + +if sys.version_info < (3, 8): + raise ImportError("Iceberg is only supported on Python 3.8+") + import json import logging import uuid -from typing import Any, Dict, Iterable, List, Optional, Tuple - -from iceberg.api import types as IcebergTypes -from iceberg.api.table import Table -from iceberg.api.types.types import NestedField -from iceberg.core.base_table import BaseTable -from iceberg.core.filesystem.filesystem_tables import FilesystemTables -from iceberg.exceptions import NoSuchTableException +from typing import Any, Dict, Iterable, List, Optional + +from pyiceberg.catalog import Catalog +from pyiceberg.schema import Schema, SchemaVisitorPerPrimitiveType, visit +from pyiceberg.table import Table +from pyiceberg.typedef import Identifier +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DecimalType, + DoubleType, + FixedType, + FloatType, + IntegerType, + ListType, + LongType, + MapType, + NestedField, + StringType, + StructType, + TimestampType, + TimestamptzType, + TimeType, + UUIDType, +) from datahub.emitter.mce_builder import ( make_data_platform_urn, @@ -59,23 +82,13 @@ LOGGER = logging.getLogger(__name__) -_all_atomic_types = { - IcebergTypes.BooleanType: "boolean", - IcebergTypes.IntegerType: "int", - IcebergTypes.LongType: "long", - IcebergTypes.FloatType: "float", - IcebergTypes.DoubleType: "double", - IcebergTypes.BinaryType: "bytes", - 
IcebergTypes.StringType: "string", -} - @platform_name("Iceberg") @support_status(SupportStatus.TESTING) @config_class(IcebergSourceConfig) @capability( SourceCapability.PLATFORM_INSTANCE, - "Optionally enabled via configuration, an Iceberg instance represents the datalake name where the table is stored.", + "Optionally enabled via configuration, an Iceberg instance represents the catalog name where the table is stored.", ) @capability(SourceCapability.DOMAINS, "Currently not supported.", supported=False) @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration.") @@ -95,16 +108,7 @@ class IcebergSource(StatefulIngestionSourceBase): The DataHub Iceberg source plugin extracts metadata from [Iceberg tables](https://iceberg.apache.org/spec/) stored in a distributed or local file system. Typically, Iceberg tables are stored in a distributed file system like S3 or Azure Data Lake Storage (ADLS) and registered in a catalog. There are various catalog implementations like Filesystem-based, RDBMS-based or even REST-based catalogs. This Iceberg source plugin relies on the - [Iceberg python_legacy library](https://github.com/apache/iceberg/tree/master/python_legacy) and its support for catalogs is limited at the moment. - A new version of the [Iceberg Python library](https://github.com/apache/iceberg/tree/master/python) is currently in development and should fix this. - Because of this limitation, this source plugin **will only ingest HadoopCatalog-based tables that have a `version-hint.text` metadata file**. - - Ingestion of tables happens in 2 steps: - 1. Discover Iceberg tables stored in file system. - 2. Load discovered tables using Iceberg python_legacy library - - The current implementation of the Iceberg source plugin will only discover tables stored in a local file system or in ADLS. Support for S3 could - be added fairly easily. + [pyiceberg library](https://py.iceberg.apache.org/). """ def __init__(self, config: IcebergSourceConfig, ctx: PipelineContext) -> None: @@ -112,7 +116,6 @@ def __init__(self, config: IcebergSourceConfig, ctx: PipelineContext) -> None: self.platform: str = "iceberg" self.report: IcebergSourceReport = IcebergSourceReport() self.config: IcebergSourceConfig = config - self.iceberg_client: FilesystemTables = config.filesystem_tables @classmethod def create(cls, config_dict: Dict, ctx: PipelineContext) -> "IcebergSource": @@ -127,23 +130,31 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: ).workunit_processor, ] + def _get_datasets(self, catalog: Catalog) -> Iterable[Identifier]: + for namespace in catalog.list_namespaces(): + yield from catalog.list_tables(namespace) + def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: - for dataset_path, dataset_name in self.config.get_paths(): # Tuple[str, str] - try: - if not self.config.table_pattern.allowed(dataset_name): - # Path contained a valid Iceberg table, but is rejected by pattern. - self.report.report_dropped(dataset_name) - continue + try: + catalog = self.config.get_catalog() + except Exception as e: + LOGGER.error("Failed to get catalog", exc_info=True) + self.report.report_failure( + "get-catalog", f"Failed to get catalog {self.config.catalog.name}: {e}" + ) + return + + for dataset_path in self._get_datasets(catalog): + dataset_name = ".".join(dataset_path) + if not self.config.table_pattern.allowed(dataset_name): + # Dataset name is rejected by pattern, report as dropped. 
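For reference, a minimal standalone sketch of the catalog enumeration this new code path performs. It assumes the REST catalog and credentials used by the integration tests later in this patch; the endpoint and keys are illustrative, not part of the change:

from pyiceberg.catalog import load_catalog

# Load a REST catalog; any pyiceberg-supported catalog type is configured the same way.
catalog = load_catalog(
    "default",
    **{
        "type": "rest",
        "uri": "http://localhost:8181",
        "s3.access-key-id": "admin",
        "s3.secret-access-key": "password",
    },
)

# Enumerate every table of every namespace, mirroring _get_datasets above.
for namespace in catalog.list_namespaces():
    for identifier in catalog.list_tables(namespace):
        print(".".join(identifier))  # e.g. "nyc.taxis"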
+ self.report.report_dropped(dataset_name) + continue - # Try to load an Iceberg table. Might not contain one, this will be caught by NoSuchTableException. - table: Table = self.iceberg_client.load(dataset_path) + try: + # Try to load an Iceberg table. Might not contain one, this will be caught by NoSuchIcebergTableError. + table = catalog.load_table(dataset_path) yield from self._create_iceberg_workunit(dataset_name, table) - except NoSuchTableException: - # Path did not contain a valid Iceberg table. Silently ignore this. - LOGGER.debug( - f"Path {dataset_path} does not contain table {dataset_name}" - ) - pass except Exception as e: self.report.report_failure("general", f"Failed to create workunit: {e}") LOGGER.exception( @@ -165,26 +176,21 @@ def _create_iceberg_workunit( aspects=[Status(removed=False)], ) - custom_properties: Dict = dict(table.properties()) - custom_properties["location"] = table.location() - try: - if isinstance(table, BaseTable) and table.current_snapshot(): - custom_properties["snapshot-id"] = str( - table.current_snapshot().snapshot_id - ) - custom_properties[ - "manifest-list" - ] = table.current_snapshot().manifest_location - except KeyError: - # The above API is not well implemented, and can throw KeyError when there is no data. - pass + # Dataset properties aspect. + custom_properties = table.metadata.properties.copy() + custom_properties["location"] = table.metadata.location + custom_properties["format-version"] = str(table.metadata.format_version) + if table.current_snapshot(): + custom_properties["snapshot-id"] = str(table.current_snapshot().snapshot_id) + custom_properties["manifest-list"] = table.current_snapshot().manifest_list dataset_properties = DatasetPropertiesClass( tags=[], - description=table.properties().get("comment", None), + description=table.metadata.properties.get("comment", None), customProperties=custom_properties, ) dataset_snapshot.aspects.append(dataset_properties) + # Dataset ownership aspect. 
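To illustrate the ownership lookup that follows, a hedged sketch of what it reduces to for a table whose "owner" property is "root" (as in the golden files later in this patch); the property names mirror the source's config defaults:

from datahub.emitter.mce_builder import make_group_urn, make_user_urn

# Stand-in for table.metadata.properties on a loaded pyiceberg Table.
table_properties = {"owner": "root"}
user_ownership_property = "owner"
group_ownership_property = "owner"

if user_ownership_property in table_properties:
    print(make_user_urn(table_properties[user_ownership_property]))  # urn:li:corpuser:root
if group_ownership_property in table_properties:
    print(make_group_urn(table_properties[group_ownership_property]))  # urn:li:corpGroup:root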
dataset_ownership = self._get_ownership_aspect(table) if dataset_ownership: dataset_snapshot.aspects.append(dataset_ownership) @@ -206,8 +212,10 @@ def _create_iceberg_workunit( def _get_ownership_aspect(self, table: Table) -> Optional[OwnershipClass]: owners = [] if self.config.user_ownership_property: - if self.config.user_ownership_property in table.properties(): - user_owner = table.properties()[self.config.user_ownership_property] + if self.config.user_ownership_property in table.metadata.properties: + user_owner = table.metadata.properties[ + self.config.user_ownership_property + ] owners.append( OwnerClass( owner=make_user_urn(user_owner), @@ -216,8 +224,10 @@ def _get_ownership_aspect(self, table: Table) -> Optional[OwnershipClass]: ) ) if self.config.group_ownership_property: - if self.config.group_ownership_property in table.properties(): - group_owner = table.properties()[self.config.group_ownership_property] + if self.config.group_ownership_property in table.metadata.properties: + group_owner = table.metadata.properties[ + self.config.group_ownership_property + ] owners.append( OwnerClass( owner=make_group_urn(group_owner), @@ -225,9 +235,7 @@ def _get_ownership_aspect(self, table: Table) -> Optional[OwnershipClass]: source=None, ) ) - if owners: - return OwnershipClass(owners=owners) - return None + return OwnershipClass(owners=owners) if owners else None def _get_dataplatform_instance_aspect( self, dataset_urn: str @@ -249,191 +257,171 @@ def _get_dataplatform_instance_aspect( def _create_schema_metadata( self, dataset_name: str, table: Table ) -> SchemaMetadata: - schema_fields: List[SchemaField] = self._get_schema_fields( - table.schema().columns() - ) + schema_fields = self._get_schema_fields_for_schema(table.schema()) schema_metadata = SchemaMetadata( schemaName=dataset_name, platform=make_data_platform_urn(self.platform), version=0, hash="", - platformSchema=OtherSchema(rawSchema=repr(table.schema())), + platformSchema=OtherSchema(rawSchema=str(table.schema())), fields=schema_fields, ) return schema_metadata - def _get_schema_fields(self, columns: Tuple) -> List[SchemaField]: - canonical_schema: List[SchemaField] = [] - for column in columns: - fields = self._get_schema_fields_for_column(column) - canonical_schema.extend(fields) - return canonical_schema - - def _get_schema_fields_for_column( + def _get_schema_fields_for_schema( self, - column: NestedField, + schema: Schema, ) -> List[SchemaField]: - field_type: IcebergTypes.Type = column.type - if field_type.is_primitive_type() or field_type.is_nested_type(): - avro_schema: Dict = self._get_avro_schema_from_data_type(column) - schema_fields: List[SchemaField] = schema_util.avro_schema_to_mce_fields( - json.dumps(avro_schema), default_nullable=column.is_optional - ) - return schema_fields + avro_schema = visit(schema, ToAvroSchemaIcebergVisitor()) + schema_fields = schema_util.avro_schema_to_mce_fields( + json.dumps(avro_schema), default_nullable=False + ) + return schema_fields + + def get_report(self) -> SourceReport: + return self.report + + +class ToAvroSchemaIcebergVisitor(SchemaVisitorPerPrimitiveType[Dict[str, Any]]): + """Implementation of a visitor to build an Avro schema as a dictionary from an Iceberg schema.""" - raise ValueError(f"Invalid Iceberg field type: {field_type}") + @staticmethod + def _gen_name(prefix: str) -> str: + return f"{prefix}{str(uuid.uuid4()).replace('-', '')}" - def _get_avro_schema_from_data_type(self, column: NestedField) -> Dict[str, Any]: - """ - See Iceberg documentation for Avro 
mapping: - https://iceberg.apache.org/#spec/#appendix-a-format-specific-requirements - """ - # The record structure represents the dataset level. - # The inner fields represent the complex field (struct/array/map/union). + def schema(self, schema: Schema, struct_result: Dict[str, Any]) -> Dict[str, Any]: + return struct_result + + def struct( + self, struct: StructType, field_results: List[Dict[str, Any]] + ) -> Dict[str, Any]: + nullable = True return { "type": "record", - "name": "__struct_", - "fields": [ - { - "name": column.name, - "type": _parse_datatype(column.type, column.is_optional), - "doc": column.doc, - } - ], + "name": self._gen_name("__struct_"), + "fields": field_results, + "native_data_type": str(struct), + "_nullable": nullable, } - def get_report(self) -> SourceReport: - return self.report - + def field(self, field: NestedField, field_result: Dict[str, Any]) -> Dict[str, Any]: + field_result["_nullable"] = not field.required + return { + "name": field.name, + "type": field_result, + "doc": field.doc, + } -def _parse_datatype(type: IcebergTypes.Type, nullable: bool = False) -> Dict[str, Any]: - # Check for complex types: struct, list, map - if type.is_list_type(): - list_type: IcebergTypes.ListType = type + def list( + self, list_type: ListType, element_result: Dict[str, Any] + ) -> Dict[str, Any]: return { "type": "array", - "items": _parse_datatype(list_type.element_type), - "native_data_type": str(type), - "_nullable": nullable, + "items": element_result, + "native_data_type": str(list_type), + "_nullable": not list_type.element_required, } - elif type.is_map_type(): + + def map( + self, + map_type: MapType, + key_result: Dict[str, Any], + value_result: Dict[str, Any], + ) -> Dict[str, Any]: # The Iceberg Map type will be handled differently. The idea is to translate the map # similar to the Map.Entry struct of Java i.e. as an array of map_entry struct, where # the map_entry struct has a key field and a value field. The key and value type can # be complex or primitive types. 
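A worked example of the translation described in this comment, with illustrative names: an Iceberg map<string, long> with a required key and an optional value becomes an Avro array of key/value records, so per-entry nullability survives the conversion:

# Hypothetical visitor output for map<string, long> (key required, value optional).
map_entry = {
    "type": "record",
    "name": "__map_entry_example",  # real names get a uuid suffix via _gen_name
    "fields": [
        {"name": "key", "type": {"type": "string", "_nullable": False}},
        {"name": "value", "type": {"type": "long", "_nullable": True}},
    ],
}
avro_map = {
    "type": "array",
    "items": map_entry,
    "native_data_type": "map<string, long>",
}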
- map_type: IcebergTypes.MapType = type - map_entry: Dict[str, Any] = { + key_result["_nullable"] = False + value_result["_nullable"] = not map_type.value_required + map_entry = { "type": "record", - "name": _gen_name("__map_entry_"), + "name": self._gen_name("__map_entry_"), "fields": [ { "name": "key", - "type": _parse_datatype(map_type.key_type(), False), + "type": key_result, }, { "name": "value", - "type": _parse_datatype(map_type.value_type(), True), + "type": value_result, }, ], } return { "type": "array", "items": map_entry, - "native_data_type": str(type), - "_nullable": nullable, + "native_data_type": str(map_type), } - elif type.is_struct_type(): - structType: IcebergTypes.StructType = type - return _parse_struct_fields(structType.fields, nullable) - else: - # Primitive types - return _parse_basic_datatype(type, nullable) - - -def _parse_struct_fields(parts: Tuple[NestedField], nullable: bool) -> Dict[str, Any]: - fields = [] - for nested_field in parts: # type: NestedField - field_name = nested_field.name - field_type = _parse_datatype(nested_field.type, nested_field.is_optional) - fields.append({"name": field_name, "type": field_type, "doc": nested_field.doc}) - return { - "type": "record", - "name": _gen_name("__struct_"), - "fields": fields, - "native_data_type": "struct<{}>".format(parts), - "_nullable": nullable, - } - - -def _parse_basic_datatype( - type: IcebergTypes.PrimitiveType, nullable: bool -) -> Dict[str, Any]: - """ - See https://iceberg.apache.org/#spec/#avro - """ - # Check for an atomic types. - for iceberg_type in _all_atomic_types.keys(): - if isinstance(type, iceberg_type): - return { - "type": _all_atomic_types[iceberg_type], - "native_data_type": repr(type), - "_nullable": nullable, - } - - # Fixed is a special case where it is not an atomic type and not a logical type. - if isinstance(type, IcebergTypes.FixedType): - fixed_type: IcebergTypes.FixedType = type + + def visit_fixed(self, fixed_type: FixedType) -> Dict[str, Any]: return { "type": "fixed", - "name": _gen_name("__fixed_"), - "size": fixed_type.length, - "native_data_type": repr(fixed_type), - "_nullable": nullable, + "name": self._gen_name("__fixed_"), + "size": len(fixed_type), + "native_data_type": str(fixed_type), } - # Not an atomic type, so check for a logical type. - if isinstance(type, IcebergTypes.DecimalType): + def visit_decimal(self, decimal_type: DecimalType) -> Dict[str, Any]: # Also of interest: https://avro.apache.org/docs/current/spec.html#Decimal - decimal_type: IcebergTypes.DecimalType = type return { # "type": "bytes", # when using bytes, avro drops _nullable attribute and others. See unit test. 
"type": "fixed", # to fix avro bug ^ resolved by using a fixed type - "name": _gen_name( + "name": self._gen_name( "__fixed_" ), # to fix avro bug ^ resolved by using a fixed type "size": 1, # to fix avro bug ^ resolved by using a fixed type "logicalType": "decimal", "precision": decimal_type.precision, "scale": decimal_type.scale, - "native_data_type": repr(decimal_type), - "_nullable": nullable, + "native_data_type": str(decimal_type), + } + + def visit_boolean(self, boolean_type: BooleanType) -> Dict[str, Any]: + return { + "type": "boolean", + "native_data_type": str(boolean_type), } - elif isinstance(type, IcebergTypes.UUIDType): - uuid_type: IcebergTypes.UUIDType = type + + def visit_integer(self, integer_type: IntegerType) -> Dict[str, Any]: return { - "type": "string", - "logicalType": "uuid", - "native_data_type": repr(uuid_type), - "_nullable": nullable, + "type": "int", + "native_data_type": str(integer_type), + } + + def visit_long(self, long_type: LongType) -> Dict[str, Any]: + return { + "type": "long", + "native_data_type": str(long_type), } - elif isinstance(type, IcebergTypes.DateType): - date_type: IcebergTypes.DateType = type + + def visit_float(self, float_type: FloatType) -> Dict[str, Any]: + return { + "type": "float", + "native_data_type": str(float_type), + } + + def visit_double(self, double_type: DoubleType) -> Dict[str, Any]: + return { + "type": "double", + "native_data_type": str(double_type), + } + + def visit_date(self, date_type: DateType) -> Dict[str, Any]: return { "type": "int", "logicalType": "date", - "native_data_type": repr(date_type), - "_nullable": nullable, + "native_data_type": str(date_type), } - elif isinstance(type, IcebergTypes.TimeType): - time_type: IcebergTypes.TimeType = type + + def visit_time(self, time_type: TimeType) -> Dict[str, Any]: return { "type": "long", "logicalType": "time-micros", - "native_data_type": repr(time_type), - "_nullable": nullable, + "native_data_type": str(time_type), } - elif isinstance(type, IcebergTypes.TimestampType): - timestamp_type: IcebergTypes.TimestampType = type + + def visit_timestamp(self, timestamp_type: TimestampType) -> Dict[str, Any]: # Avro supports 2 types of timestamp: # - Timestamp: independent of a particular timezone or calendar (TZ information is lost) # - Local Timestamp: represents a timestamp in a local timezone, regardless of what specific time zone is considered local @@ -446,12 +434,40 @@ def _parse_basic_datatype( # "logicalType": "timestamp-micros" # if timestamp_type.adjust_to_utc # else "local-timestamp-micros", - "native_data_type": repr(timestamp_type), - "_nullable": nullable, + "native_data_type": str(timestamp_type), } - return {"type": "null", "native_data_type": repr(type)} + def visit_timestampz(self, timestamptz_type: TimestamptzType) -> Dict[str, Any]: + # Avro supports 2 types of timestamp: + # - Timestamp: independent of a particular timezone or calendar (TZ information is lost) + # - Local Timestamp: represents a timestamp in a local timezone, regardless of what specific time zone is considered local + # utcAdjustment: bool = True + return { + "type": "long", + "logicalType": "timestamp-micros", + # Commented out since Avro's Python implementation (1.11.0) does not support local-timestamp-micros, even though it exists in the spec. 
+ # See bug report: https://issues.apache.org/jira/browse/AVRO-3476 and PR https://github.com/apache/avro/pull/1634 + # "logicalType": "timestamp-micros" + # if timestamp_type.adjust_to_utc + # else "local-timestamp-micros", + "native_data_type": str(timestamptz_type), + } + def visit_string(self, string_type: StringType) -> Dict[str, Any]: + return { + "type": "string", + "native_data_type": str(string_type), + } -def _gen_name(prefix: str) -> str: - return f"{prefix}{str(uuid.uuid4()).replace('-', '')}" + def visit_uuid(self, uuid_type: UUIDType) -> Dict[str, Any]: + return { + "type": "string", + "logicalType": "uuid", + "native_data_type": str(uuid_type), + } + + def visit_binary(self, binary_type: BinaryType) -> Dict[str, Any]: + return { + "type": "bytes", + "native_data_type": str(binary_type), + } diff --git a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_common.py b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_common.py index d5b9092912d4e..f4d93f67b27af 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_common.py @@ -1,20 +1,11 @@ -import os from dataclasses import dataclass, field -from typing import Dict, Iterable, List, Optional, Tuple - -import pydantic -from azure.storage.filedatalake import FileSystemClient, PathProperties -from iceberg.core.filesystem.abfss_filesystem import AbfssFileSystem -from iceberg.core.filesystem.filesystem_tables import FilesystemTables -from pydantic import Field, root_validator - -from datahub.configuration.common import ( - AllowDenyPattern, - ConfigModel, - ConfigurationError, -) +from typing import Dict, List, Optional + +from pydantic import Field +from pyiceberg.catalog import Catalog, load_catalog + +from datahub.configuration.common import AllowDenyPattern, ConfigModel from datahub.configuration.source_common import DatasetSourceConfigMixin -from datahub.ingestion.source.azure.azure_common import AdlsSourceConfig from datahub.ingestion.source.state.stale_entity_removal_handler import ( StaleEntityRemovalSourceReport, StatefulStaleMetadataRemovalConfig, @@ -59,22 +50,32 @@ class IcebergProfilingConfig(ConfigModel): # include_field_sample_values: bool = True +class IcebergCatalogConfig(ConfigModel): + """ + Iceberg catalog config. + + https://py.iceberg.apache.org/configuration/ + """ + + name: str = Field( + default="default", + description="Name of catalog", + ) + type: str = Field( + description="Type of catalog. See [PyIceberg](https://py.iceberg.apache.org/configuration/) for list of possible values.", + ) + config: Dict[str, str] = Field( + description="Catalog specific configuration. See [PyIceberg documentation](https://py.iceberg.apache.org/configuration/) for details.", + ) + + class IcebergSourceConfig(StatefulIngestionConfigBase, DatasetSourceConfigMixin): # Override the stateful_ingestion config param with the Iceberg custom stateful ingestion config in the IcebergSourceConfig - stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = pydantic.Field( + stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = Field( default=None, description="Iceberg Stateful Ingestion Config." ) - adls: Optional[AdlsSourceConfig] = Field( - default=None, - description="[Azure Data Lake Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-introduction) to crawl for Iceberg tables. 
This is one filesystem type supported by this source and **only one can be configured**.", - ) - localfs: Optional[str] = Field( - default=None, - description="Local path to crawl for Iceberg tables. This is one filesystem type supported by this source and **only one can be configured**.", - ) - max_path_depth: int = Field( - default=2, - description="Maximum folder depth to crawl for Iceberg tables. Folders deeper than this value will be silently ignored.", + catalog: IcebergCatalogConfig = Field( + description="Catalog configuration where to find Iceberg tables. See [pyiceberg's catalog configuration details](https://py.iceberg.apache.org/configuration/).", ) table_pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), @@ -95,92 +96,15 @@ def is_profiling_enabled(self) -> bool: self.profiling.operation_config ) - @root_validator() - def _ensure_one_filesystem_is_configured( - cls: "IcebergSourceConfig", values: Dict - ) -> Dict: - if values.get("adls") and values.get("localfs"): - raise ConfigurationError( - "Only one filesystem can be configured: adls or localfs" - ) - elif not values.get("adls") and not values.get("localfs"): - raise ConfigurationError( - "One filesystem (adls or localfs) needs to be configured." - ) - return values - - @property - def adls_filesystem_client(self) -> FileSystemClient: - """Azure Filesystem client if configured. - - Raises: - ConfigurationError: If ADLS is not configured. - - Returns: - FileSystemClient: Azure Filesystem client instance to access storage account files and folders. - """ - if self.adls: # TODO Use local imports for abfss - AbfssFileSystem.get_instance().set_conf(self.adls.dict()) - return self.adls.get_filesystem_client() - raise ConfigurationError("No ADLS filesystem client configured") - - @property - def filesystem_tables(self) -> FilesystemTables: - """Iceberg FilesystemTables abstraction to access tables on a filesystem. - Currently supporting ADLS (Azure Storage Account) and local filesystem. - - Raises: - ConfigurationError: If no filesystem was configured. + def get_catalog(self) -> Catalog: + """Returns the Iceberg catalog instance as configured by the `catalog` dictionary. Returns: - FilesystemTables: An Iceberg FilesystemTables abstraction instance to access tables on a filesystem + Catalog: Iceberg catalog instance. """ - if self.adls: - return FilesystemTables(self.adls.dict()) - elif self.localfs: - return FilesystemTables() - raise ConfigurationError("No filesystem client configured") - - def _get_adls_paths(self, root_path: str, depth: int) -> Iterable[Tuple[str, str]]: - if self.adls and depth < self.max_path_depth: - sub_paths = self.adls_filesystem_client.get_paths( - path=root_path, recursive=False - ) - sub_path: PathProperties - for sub_path in sub_paths: - if sub_path.is_directory: - dataset_name = ".".join( - sub_path.name[len(self.adls.base_path) + 1 :].split("/") - ) - yield self.adls.get_abfss_url(sub_path.name), dataset_name - yield from self._get_adls_paths(sub_path.name, depth + 1) - - def _get_localfs_paths( - self, root_path: str, depth: int - ) -> Iterable[Tuple[str, str]]: - if self.localfs and depth < self.max_path_depth: - for f in os.scandir(root_path): - if f.is_dir(): - dataset_name = ".".join(f.path[len(self.localfs) + 1 :].split("/")) - yield f.path, dataset_name - yield from self._get_localfs_paths(f.path, depth + 1) - - def get_paths(self) -> Iterable[Tuple[str, str]]: - """Generates a sequence of data paths and dataset names. 
-
-        Raises:
-            ConfigurationError: If no filesystem configured.
-
-        Yields:
-            Iterator[Iterable[Tuple[str, str]]]: A sequence of tuples where the first item is the location of the dataset
-            and the second item is the associated dataset name.
-        """
-        if self.adls:
-            yield from self._get_adls_paths(self.adls.base_path, 0)
-        elif self.localfs:
-            yield from self._get_localfs_paths(self.localfs, 0)
-        else:
-            raise ConfigurationError("No filesystem client configured")
+        return load_catalog(
+            name=self.catalog.name, **{"type": self.catalog.type, **self.catalog.config}
+        )
 
 
 @dataclass
diff --git a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_profiler.py
index 1437847ee4343..e1d52752d779a 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_profiler.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/iceberg/iceberg_profiler.py
@@ -1,17 +1,26 @@
-from datetime import datetime, timedelta
 from typing import Any, Callable, Dict, Iterable, Union, cast
 
-from iceberg.api import types as IcebergTypes
-from iceberg.api.data_file import DataFile
-from iceberg.api.manifest_file import ManifestFile
-from iceberg.api.schema import Schema
-from iceberg.api.snapshot import Snapshot
-from iceberg.api.table import Table
-from iceberg.api.types import Conversions, NestedField, Type, TypeID
-from iceberg.core.base_table import BaseTable
-from iceberg.core.filesystem import FileSystemInputFile
-from iceberg.core.manifest_reader import ManifestReader
-from iceberg.exceptions.exceptions import FileSystemNotFound
+from pyiceberg.conversions import from_bytes
+from pyiceberg.schema import Schema
+from pyiceberg.table import Table
+from pyiceberg.types import (
+    DateType,
+    DecimalType,
+    DoubleType,
+    FloatType,
+    IcebergType,
+    IntegerType,
+    LongType,
+    TimestampType,
+    TimestamptzType,
+    TimeType,
+)
+from pyiceberg.utils.datetime import (
+    days_to_date,
+    to_human_time,
+    to_human_timestamp,
+    to_human_timestamptz,
+)
 
 from datahub.emitter.mce_builder import get_sys_time
 from datahub.emitter.mcp import MetadataChangeProposalWrapper
@@ -51,15 +60,18 @@ def _aggregate_bounds(
         schema: Schema,
         aggregator: Callable,
         aggregated_values: Dict[int, Any],
-        manifest_values: Dict[int, Any],
+        manifest_values: Dict[int, bytes],
     ) -> None:
         for field_id, value_encoded in manifest_values.items():  # type: int, Any
-            field: NestedField = schema.find_field(field_id)
-            # Bounds in manifests can reference historical field IDs that are not part of the current schema.
-            # We simply not profile those since we only care about the current snapshot.
-            if field and IcebergProfiler._is_numeric_type(field.type):
-                value_decoded = Conversions.from_byte_buffer(field.type, value_encoded)
-                if value_decoded:
+            try:
+                field = schema.find_field(field_id)
+            except ValueError:
+                # Bounds in manifests can reference historical field IDs that are not part of the current schema.
+                # We simply do not profile those, since we only care about the current snapshot.
+                continue
+            if IcebergProfiler._is_numeric_type(field.field_type):
+                value_decoded = from_bytes(field.field_type, value_encoded)
+                if value_decoded is not None:
                     agg_value = aggregated_values.get(field_id)
                     aggregated_values[field_id] = (
                         aggregator(agg_value, value_decoded)
@@ -97,12 +109,23 @@ def profile_table(
 
         Yields:
            Iterator[Iterable[MetadataWorkUnit]]: Workunits related to datasetProfile.
""" - if not table.snapshots() or not isinstance(table, BaseTable): + current_snapshot = table.current_snapshot() + if not current_snapshot: # Table has no data, cannot profile, or we can't get current_snapshot. return - row_count: int = int(table.current_snapshot().summary["total-records"]) - column_count: int = len(table.schema()._id_to_name) + row_count = ( + int(current_snapshot.summary.additional_properties["total-records"]) + if current_snapshot.summary + else 0 + ) + column_count = len( + [ + field.field_id + for field in table.schema().fields + if field.field_type.is_primitive + ] + ) dataset_profile = DatasetProfileClass( timestampMillis=get_sys_time(), rowCount=row_count, @@ -110,47 +133,44 @@ def profile_table( ) dataset_profile.fieldProfiles = [] - field_paths: Dict[int, str] = table.schema()._id_to_name - current_snapshot: Snapshot = table.current_snapshot() - total_count: int = 0 + total_count = 0 null_counts: Dict[int, int] = {} min_bounds: Dict[int, Any] = {} max_bounds: Dict[int, Any] = {} - manifest: ManifestFile try: - for manifest in current_snapshot.manifests: - manifest_input_file = FileSystemInputFile.from_location( - manifest.manifest_path, table.ops.conf - ) - manifest_reader = ManifestReader.read(manifest_input_file) - data_file: DataFile - for data_file in manifest_reader.iterator(): + for manifest in current_snapshot.manifests(table.io): + for manifest_entry in manifest.fetch_manifest_entry(table.io): + data_file = manifest_entry.data_file if self.config.include_field_null_count: null_counts = self._aggregate_counts( - null_counts, data_file.null_value_counts() + null_counts, data_file.null_value_counts ) if self.config.include_field_min_value: self._aggregate_bounds( table.schema(), min, min_bounds, - data_file.lower_bounds(), + data_file.lower_bounds, ) if self.config.include_field_max_value: self._aggregate_bounds( table.schema(), max, max_bounds, - data_file.upper_bounds(), + data_file.upper_bounds, ) - total_count += data_file.record_count() - # TODO Work on error handling to provide better feedback. Iceberg exceptions are weak... - except FileSystemNotFound as e: - raise Exception("Error loading table manifests") from e + total_count += data_file.record_count + except Exception as e: + # Catch any errors that arise from attempting to read the Iceberg table's manifests + # This will prevent stateful ingestion from being blocked by an error (profiling is not critical) + self.report.report_warning( + "profiling", + f"Error while profiling dataset {dataset_name}: {e}", + ) if row_count: # Iterating through fieldPaths introduces unwanted stats for list element fields... 
- for field_id, field_path in field_paths.items(): - field: NestedField = table.schema().find_field(field_id) + for field_path, field_id in table.schema()._name_to_id.items(): + field = table.schema().find_field(field_id) column_profile = DatasetFieldProfileClass(fieldPath=field_path) if self.config.include_field_null_count: column_profile.nullCount = cast(int, null_counts.get(field_id, 0)) @@ -160,16 +180,16 @@ def profile_table( if self.config.include_field_min_value: column_profile.min = ( - self._renderValue( - dataset_name, field.type, min_bounds.get(field_id) + self._render_value( + dataset_name, field.field_type, min_bounds.get(field_id) ) if field_id in min_bounds else None ) if self.config.include_field_max_value: column_profile.max = ( - self._renderValue( - dataset_name, field.type, max_bounds.get(field_id) + self._render_value( + dataset_name, field.field_type, max_bounds.get(field_id) ) if field_id in max_bounds else None @@ -181,24 +201,18 @@ def profile_table( aspect=dataset_profile, ).as_workunit() - # The following will eventually be done by the Iceberg API (in the new Python refactored API). - def _renderValue( - self, dataset_name: str, value_type: Type, value: Any + def _render_value( + self, dataset_name: str, value_type: IcebergType, value: Any ) -> Union[str, None]: try: - if value_type.type_id == TypeID.TIMESTAMP: - if value_type.adjust_to_utc: - # TODO Deal with utc when required - microsecond_unix_ts = value - else: - microsecond_unix_ts = value - return datetime.fromtimestamp(microsecond_unix_ts / 1000000.0).strftime( - "%Y-%m-%d %H:%M:%S" - ) - elif value_type.type_id == TypeID.DATE: - return (datetime(1970, 1, 1, 0, 0) + timedelta(value - 1)).strftime( - "%Y-%m-%d" - ) + if isinstance(value_type, TimestampType): + return to_human_timestamp(value) + if isinstance(value_type, TimestamptzType): + return to_human_timestamptz(value) + elif isinstance(value_type, DateType): + return days_to_date(value).strftime("%Y-%m-%d") + elif isinstance(value_type, TimeType): + return to_human_time(value) return str(value) except Exception as e: self.report.report_warning( @@ -208,17 +222,18 @@ def _renderValue( return None @staticmethod - def _is_numeric_type(type: Type) -> bool: + def _is_numeric_type(type: IcebergType) -> bool: return isinstance( type, ( - IcebergTypes.DateType, - IcebergTypes.DecimalType, - IcebergTypes.DoubleType, - IcebergTypes.FloatType, - IcebergTypes.IntegerType, - IcebergTypes.LongType, - IcebergTypes.TimestampType, - IcebergTypes.TimeType, + DateType, + DecimalType, + DoubleType, + FloatType, + IntegerType, + LongType, + TimestampType, + TimestamptzType, + TimeType, ), ) diff --git a/metadata-ingestion/tests/integration/iceberg/.gitignore b/metadata-ingestion/tests/integration/iceberg/.gitignore new file mode 100644 index 0000000000000..a7dfcf56788b4 --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/.gitignore @@ -0,0 +1,3 @@ +# Folders created by Iceberg's docker-compose +notebooks/ +warehouse/ \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/docker-compose.yml b/metadata-ingestion/tests/integration/iceberg/docker-compose.yml new file mode 100644 index 0000000000000..ab5c534e7289b --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/docker-compose.yml @@ -0,0 +1,74 @@ +version: "3" + +services: + spark-iceberg: + image: tabulario/spark-iceberg:3.3.2_1.3.0 + container_name: spark-iceberg + networks: + iceberg_net: + depends_on: + - rest + - minio + volumes: + - 
./warehouse:/home/iceberg/warehouse + - ./notebooks:/home/iceberg/notebooks/notebooks + - ./setup:/home/iceberg/setup + environment: + - AWS_ACCESS_KEY_ID=admin + - AWS_SECRET_ACCESS_KEY=password + - AWS_REGION=us-east-1 + ports: + - 8888:8888 + - 8080:8080 + - 10000:10000 + - 10001:10001 + rest: + image: tabulario/iceberg-rest:0.5.0 + container_name: iceberg-rest + networks: + iceberg_net: + ports: + - 8181:8181 + environment: + - AWS_ACCESS_KEY_ID=admin + - AWS_SECRET_ACCESS_KEY=password + - AWS_REGION=us-east-1 + - CATALOG_WAREHOUSE=s3a://warehouse/wh/ + - CATALOG_IO__IMPL=org.apache.iceberg.aws.s3.S3FileIO + - CATALOG_S3_ENDPOINT=http://minio:9000 + minio: + image: minio/minio + container_name: minio + environment: + - MINIO_ROOT_USER=admin + - MINIO_ROOT_PASSWORD=password + - MINIO_DOMAIN=minio + networks: + iceberg_net: + aliases: + - warehouse.minio + ports: + - 9001:9001 + - 9000:9000 + command: ["server", "/data", "--console-address", ":9001"] + mc: + depends_on: + - minio + image: minio/mc + container_name: mc + networks: + iceberg_net: + environment: + - AWS_ACCESS_KEY_ID=admin + - AWS_SECRET_ACCESS_KEY=password + - AWS_REGION=us-east-1 + entrypoint: > + /bin/sh -c " + until (/usr/bin/mc config host add minio http://minio:9000 admin password) do echo '...waiting...' && sleep 1; done; + /usr/bin/mc rm -r --force minio/warehouse; + /usr/bin/mc mb minio/warehouse; + /usr/bin/mc policy set public minio/warehouse; + exit 0; + " +networks: + iceberg_net: diff --git a/metadata-ingestion/tests/integration/iceberg/iceberg_deleted_table_mces_golden.json b/metadata-ingestion/tests/integration/iceberg/iceberg_deleted_table_mces_golden.json new file mode 100644 index 0000000000000..cc94625560a43 --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/iceberg_deleted_table_mces_golden.json @@ -0,0 +1,184 @@ +[ + { + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.nyc.another_taxis,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "owner": "root", + "created-at": "2023-07-04T14:23:10.457317300Z", + "write.format.default": "parquet", + "location": "s3a://warehouse/wh/nyc/another_taxis", + "format-version": "1", + "snapshot-id": "6904764113937987369", + "manifest-list": "s3a://warehouse/wh/nyc/another_taxis/metadata/snap-6904764113937987369-1-f18ce54a-d59c-461a-a066-9d3085ccf2f2.avro" + }, + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:root", + "type": "TECHNICAL_OWNER" + }, + { + "owner": "urn:li:corpGroup:root", + "type": "TECHNICAL_OWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "nyc.another_taxis", + "platform": "urn:li:dataPlatform:iceberg", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "table {\n 1: vendor_id: optional long\n 2: trip_date: optional timestamptz\n 3: trip_id: optional long\n 4: trip_distance: optional float\n 5: fare_amount: optional double\n 6: store_and_fwd_flag: optional string\n}" + } + }, + 
"fields": [ + { + "fieldPath": "[version=2.0].[type=struct].[type=long].vendor_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "long", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"long\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=long].trip_date", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamptz", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"logicalType\": \"timestamp-micros\", \"native_data_type\": \"timestamptz\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=long].trip_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "long", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"long\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=float].trip_distance", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "float", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"float\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=double].fare_amount", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=string].store_and_fwd_flag", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "iceberg-2020_04_14-07_00_00" + } + }, + { + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.nyc.another_taxis,PROD)", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:iceberg", + "instance": "urn:li:dataPlatformInstance:(urn:li:dataPlatform:iceberg,test_platform_instance)" + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "iceberg-2020_04_14-07_00_00" + } + }, + { + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.nyc.taxis,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": true + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "iceberg-2020_04_14-07_00_00" + } + } + ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/iceberg_ingest_mces_golden.json b/metadata-ingestion/tests/integration/iceberg/iceberg_ingest_mces_golden.json new file mode 100644 index 0000000000000..163911623470e --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/iceberg_ingest_mces_golden.json @@ -0,0 +1,153 @@ +[ + { + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,nyc.taxis,PROD)", + 
"aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "owner": "root", + "created-at": "2023-06-12T17:32:17.227545005Z", + "write.format.default": "parquet", + "location": "s3a://warehouse/wh/nyc/taxis", + "format-version": "1", + "snapshot-id": "2505818429184337337", + "manifest-list": "s3a://warehouse/wh/nyc/taxis/metadata/snap-2505818429184337337-1-a64915c4-afc8-40e3-97a7-98b072b42e10.avro" + }, + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:root", + "type": "TECHNICAL_OWNER" + }, + { + "owner": "urn:li:corpGroup:root", + "type": "TECHNICAL_OWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "nyc.taxis", + "platform": "urn:li:dataPlatform:iceberg", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "table {\n 1: vendor_id: optional long\n 2: trip_date: optional timestamptz\n 3: trip_id: optional long\n 4: trip_distance: optional float\n 5: fare_amount: optional double\n 6: store_and_fwd_flag: optional string\n}" + } + }, + "fields": [ + { + "fieldPath": "[version=2.0].[type=struct].[type=long].vendor_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "long", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"long\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=long].trip_date", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamptz", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"logicalType\": \"timestamp-micros\", \"native_data_type\": \"timestamptz\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=long].trip_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "long", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"long\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=float].trip_distance", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "float", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"float\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=double].fare_amount", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=string].store_and_fwd_flag", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + } + ] + } + } + ] + } + }, 
+ "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "iceberg-test" + } + } + ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/iceberg_profile_mces_golden.json b/metadata-ingestion/tests/integration/iceberg/iceberg_profile_mces_golden.json new file mode 100644 index 0000000000000..bdb7091014626 --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/iceberg_profile_mces_golden.json @@ -0,0 +1,216 @@ +[ + { + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { + "urn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,nyc.taxis,PROD)", + "aspects": [ + { + "com.linkedin.pegasus2avro.common.Status": { + "removed": false + } + }, + { + "com.linkedin.pegasus2avro.dataset.DatasetProperties": { + "customProperties": { + "owner": "root", + "created-at": "2023-06-12T17:33:25.422993540Z", + "write.format.default": "parquet", + "location": "s3a://warehouse/wh/nyc/taxis", + "format-version": "1", + "snapshot-id": "2585047006374307840", + "manifest-list": "s3a://warehouse/wh/nyc/taxis/metadata/snap-2585047006374307840-1-2e2bef19-40d1-4ad1-8fad-e57783477710.avro" + }, + "tags": [] + } + }, + { + "com.linkedin.pegasus2avro.common.Ownership": { + "owners": [ + { + "owner": "urn:li:corpuser:root", + "type": "TECHNICAL_OWNER" + }, + { + "owner": "urn:li:corpGroup:root", + "type": "TECHNICAL_OWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + { + "com.linkedin.pegasus2avro.schema.SchemaMetadata": { + "schemaName": "nyc.taxis", + "platform": "urn:li:dataPlatform:iceberg", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "hash": "", + "platformSchema": { + "com.linkedin.pegasus2avro.schema.OtherSchema": { + "rawSchema": "table {\n 1: vendor_id: optional long\n 2: trip_date: optional timestamptz\n 3: trip_id: optional long\n 4: trip_distance: optional float\n 5: fare_amount: optional double\n 6: store_and_fwd_flag: optional string\n}" + } + }, + "fields": [ + { + "fieldPath": "[version=2.0].[type=struct].[type=long].vendor_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "long", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"long\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=long].trip_date", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.TimeType": {} + } + }, + "nativeDataType": "timestamptz", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"logicalType\": \"timestamp-micros\", \"native_data_type\": \"timestamptz\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=long].trip_id", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "long", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"long\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=float].trip_distance", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "float", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"float\", \"_nullable\": true}" + }, + { + "fieldPath": 
"[version=2.0].[type=struct].[type=double].fare_amount", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=string].store_and_fwd_flag", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + } + ] + } + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "iceberg-test" + } + }, + { + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,nyc.taxis,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1586847600000, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 5, + "columnCount": 6, + "fieldProfiles": [ + { + "fieldPath": "vendor_id", + "nullCount": 0, + "nullProportion": 0.0, + "min": "1", + "max": "3" + }, + { + "fieldPath": "trip_date", + "nullCount": 0, + "nullProportion": 0.0, + "min": "2000-01-01T12:00:00+00:00", + "max": "2000-01-04T12:00:00+00:00" + }, + { + "fieldPath": "trip_id", + "nullCount": 0, + "nullProportion": 0.0, + "min": "1000371", + "max": "1000375" + }, + { + "fieldPath": "trip_distance", + "nullCount": 0, + "nullProportion": 0.0, + "min": "0.0", + "max": "8.399999618530273" + }, + { + "fieldPath": "fare_amount", + "nullCount": 0, + "nullProportion": 0.0, + "min": "0.0", + "max": "42.13" + }, + { + "fieldPath": "store_and_fwd_flag", + "nullCount": 0, + "nullProportion": 0.0 + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1586847600000, + "runId": "iceberg-test" + } + } + ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/iceberg_profile_to_file.yml b/metadata-ingestion/tests/integration/iceberg/iceberg_profile_to_file.yml new file mode 100644 index 0000000000000..197c03bf2ee8d --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/iceberg_profile_to_file.yml @@ -0,0 +1,25 @@ +run_id: iceberg-test + +source: + type: iceberg + config: + catalog: + name: default + type: rest + config: + uri: http://localhost:8181 + s3.access-key-id: admin + s3.secret-access-key: password + s3.region: us-east-1 + warehouse: s3a://warehouse/wh/ + py-io-impl: pyiceberg.io.pyarrow.PyArrowFileIO + s3.endpoint: http://localhost:9000 + user_ownership_property: owner + group_ownership_property: owner + profiling: + enabled: true + +sink: + type: file + config: + filename: "./iceberg_mces.json" diff --git a/metadata-ingestion/tests/integration/iceberg/iceberg_to_file.yml b/metadata-ingestion/tests/integration/iceberg/iceberg_to_file.yml new file mode 100644 index 0000000000000..8b5d035aed259 --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/iceberg_to_file.yml @@ -0,0 +1,22 @@ +run_id: iceberg-test + +source: + type: iceberg + config: + catalog: + name: default + type: rest + config: + uri: http://localhost:8181 + s3.access-key-id: admin + s3.secret-access-key: password + s3.region: us-east-1 + warehouse: s3a://warehouse/wh/ + s3.endpoint: http://localhost:9000 + user_ownership_property: owner + group_ownership_property: owner + +sink: + type: file + config: + filename: "./iceberg_mces.json" diff 
--git a/metadata-ingestion/tests/integration/iceberg/setup/create.py b/metadata-ingestion/tests/integration/iceberg/setup/create.py new file mode 100644 index 0000000000000..0799ce9c93916 --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/setup/create.py @@ -0,0 +1,46 @@ +import sys +from datetime import datetime + +from pyspark.sql import SparkSession +from pyspark.sql.types import ( + DoubleType, + FloatType, + LongType, + StringType, + StructField, + StructType, + TimestampType, +) + + +def main(table_name: str) -> None: + spark = SparkSession.builder.getOrCreate() + + schema = StructType( + [ + StructField("vendor_id", LongType(), True), + StructField("trip_date", TimestampType(), True), + StructField("trip_id", LongType(), True), + StructField("trip_distance", FloatType(), True), + StructField("fare_amount", DoubleType(), True), + StructField("store_and_fwd_flag", StringType(), True), + ] + ) + + data = [ + (1, datetime(2000, 1, 1, 12, 0), 1000371, 1.8, 15.32, "N"), + (2, datetime(2000, 1, 2, 12, 0), 1000372, 2.5, 22.15, "N"), + (2, datetime(2000, 1, 3, 12, 0), 1000373, 0.9, 9.01, "N"), + (1, datetime(2000, 1, 4, 12, 0), 1000374, 8.4, 42.13, "Y"), + # Following entry will test profiling values at 0 + (3, datetime(2000, 1, 4, 12, 0), 1000375, 0.0, 0.0, "Y"), + ] + + df = spark.createDataFrame(data, schema) + df.write.partitionBy("trip_date").saveAsTable(table_name) + + +if __name__ == "__main__": + if len(sys.argv) != 2: + raise ValueError("Missing required parameter ") + main(sys.argv[1]) diff --git a/metadata-ingestion/tests/integration/iceberg/setup/delete.py b/metadata-ingestion/tests/integration/iceberg/setup/delete.py new file mode 100644 index 0000000000000..b00306982f517 --- /dev/null +++ b/metadata-ingestion/tests/integration/iceberg/setup/delete.py @@ -0,0 +1,5 @@ +from pyspark.sql import SparkSession + +spark = SparkSession.builder.getOrCreate() + +spark.sql("DROP TABLE nyc.taxis PURGE") diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/iceberg_mces_golden.json b/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/iceberg_mces_golden.json deleted file mode 100644 index b106b91275835..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/iceberg_mces_golden.json +++ /dev/null @@ -1,131 +0,0 @@ -[ -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,namespace.iceberg_test,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.Status": { - "removed": false - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": { - "owner": "new_owner", - "provider": "ICEBERG", - "location": "/namespace/iceberg_test" - }, - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:new_owner", - "type": "TECHNICAL_OWNER" - }, - { - "owner": "urn:li:corpGroup:new_owner", - "type": "TECHNICAL_OWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "namespace.iceberg_test", - "platform": "urn:li:dataPlatform:iceberg", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "Schema(1: 
level: required string(level documentation),2: event_time: required timestamptz(event_time documentation),3: message: required string(message documentation),4: call_stack: optional list(call_stack documentation))" - } - }, - "fields": [ - { - "fieldPath": "[version=2.0].[type=struct].[type=string].level", - "nullable": false, - "description": "level documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": false}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=long].event_time", - "nullable": false, - "description": "event_time documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "timestamptz", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"logicalType\": \"timestamp-micros\", \"native_data_type\": \"timestamptz\", \"_nullable\": false}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=string].message", - "nullable": false, - "description": "message documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": false}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=array].[type=string].call_stack", - "nullable": true, - "description": "call_stack documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": { - "nestedType": [ - "string" - ] - } - } - }, - "nativeDataType": "list", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"list\", \"_nullable\": true}" - } - ] - } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "iceberg-test" - } -} -] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v1.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v1.metadata.json deleted file mode 100644 index e4ac0b9685ddc..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v1.metadata.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "11bbe5de-5ef6-4074-80db-f041065f9862", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1648729616724, - "last-column-id" : 5, - "schema" : { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - 
"type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { }, - "current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v2.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v2.metadata.json deleted file mode 100644 index 02221330b0665..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/v2.metadata.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "16e6ecee-cd5d-470f-a7a6-a197944fa4db", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1649086837695, - "last-column-id" : 5, - "schema" : { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { - "owner" : "new_owner" - }, - 
"current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ { - "timestamp-ms" : 1649086837511, - "metadata-file" : "/namespace/iceberg_test/metadata/v1.metadata.json" - } ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/version-hint.text b/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/version-hint.text deleted file mode 100644 index d8263ee986059..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/ingest_test/namespace/iceberg_test/metadata/version-hint.text +++ /dev/null @@ -1 +0,0 @@ -2 \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00000-0-72133c37-bb5c-4ffd-8ead-08f33fa2675d-00001.parquet b/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00000-0-72133c37-bb5c-4ffd-8ead-08f33fa2675d-00001.parquet deleted file mode 100644 index 48e75a030f1ca8a4f96a73e30c95d18d2728857b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1089 zcmZ`(&x_MQ6rQHp5@lIk=nM%24S~jr+1e!AZMT$#1rNG-SQb3FV$)<&1O2s0>aS&C zFM^1cY}BU0^#FECsru@Z|`)WM=U1fSsH|s zcPYXs9C^iL!`bi)u0LkEY5UvmN3m1B4Wh?(|)cLL5k_I;L!OJ57k%T?_E*9SNpQo79t{RC!O0DfJmj z5b}RY2)fgxQtX(aG-fC#m*)tvW48Q6>;x0sO*Bgr@3_LXJ;ZPHOmC-8gQd17-K3uH zb~}<*(DZ^X61q%di}C#WI!~W$GA)t{J*+tuCJxYH-ox M2c1bHv(^b diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00000-3-c638dd0f-498a-4ce9-b525-8242758d18f8-00001.parquet b/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/data/00000-3-c638dd0f-498a-4ce9-b525-8242758d18f8-00001.parquet deleted file mode 100644 index c70b94612db64ac9735ceaff2080ec59a4c7b09b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1088 zcmZ{k&ui0Q7{}kHS&1;Z4fehy5HJKV1fsqByH1G4IzEr=f~&$KF^!9zIC;T5njZ3 z{NU-y_q2!^SaO&{==>vigqL9*AoTF$_0PZ&W>@g%PamHz{5xB3G;+h$atghFJ9sxL z5%DGb?8g%=sz}Z7n~g@p`u%0`=!O6ID1n^Yuj*;oN?2k9o=b)rU=^E~5y&Xay4|~) zz=uCmZtmB?^BWswzy}*Q|E8BCv-fUBxLYU1U{+wmg zwt35T{A&!t^RdVC-3%(Rq8QY%8HQhoGJU&4JzwwiGRviuc(^1gVGD#2M4^`DM9c~q z4wS`U%o2udlASD9i@S_1f^w5F#9n9s4g(Y;M7|~QGI_(`O_Vi7Jb#7{GDmzXYu2Js zMNi;{gNs(vywiYp%HTZ84ikakQYuh9+hEAuCOt?3;p0KaRv`Ru@AO88EGFb-5`>cv z3BqJKa?C6TXTzs$gEpz#%6rBx?dbe5Gm!mzJt|ja=%{b_vLZ*J72A}n1w{_0@5$90 z13B;%71_}{LH^_bp~R(oecPoF2MATh$adFhLDcLTkiX?faN4v*T`4S;_mv2#&LBa^ z|B(>1r=e2hm_ceY$jQrd25jkVFBUt=@$bi)rHOaE!_9rfU+(GdUZ46a9ap+TUC-`1 zQZZjt^Qx3<_U*PQmCIBs7?r|?rd8IdQBn=fu=I69TWgl7rDYwv>gviFjBPPp;T}Olq)So1}`a3xdb> z2Y41i58^?=-{4kHUi9P-*gv7~B;91UXb6+{<9VO=nRg~T+qWA8k;|k;-W>h-E+_

rwIFXlpR2EUtGjsvJK5wk#=U zmv~ESks~%Y0d(uexyZ6dj1&+N%1FB4`E2Aq@F&q-j7Twygi%mNEkRLuolwTor34xI zV-`l%cvdd83d*ZBr5a17SXSh|E-FNj%OWVsh>1Wk-J-j?*w1#ASrk>$ZD&!~08AYe zBSgRCNr`^IWDDu8LW*bjLFSU*)7^eDnq&pucuE=6 z+Y=Q|kqud(#--XpHK8=rhJ zapVwh87&ILi9-zHSf*H%L`&r*!iiNL9OYobQA{nmr}9?5t30BpmTf!F!Umw} z;20r#QY006jL99OIua?LdA~8yk-8|vhFhiB@%jc6 zZjU-3vf6QPGebX%gwhX1as#{-C{}s;5m(}#3s-!XIV>3Jp}Wt9B!m8V%Dh-0kKq(Sie<-sm43rd{~;{{s(Y=oTo3+hD}Ud++Q$U4DWNu^l&1?G#$GKUS}O03|86_oW?!am8h*eoCq-0 z2qr`lg9&d95mgEvL@>1orf*o-^O2R0L1t(CAs)2Eo`xQ}hv%aK*I6F$v}ZYE(8apr zX2G<`kcDg`2VT?I_T52vE^t{q0wMVY4_+0E5YS1_1qC1+g=cM)_j(;a4Rr_&Hp+Iv zG~rLmbT?3qbEshYa*8}JdZb8-fRsf(O6Lp{Gn|S!xGJ0(A2%^h=aJvb5J>-sby?@= z$Tu8Y>RP`>+G6;<(y9)LQbxS;43%V3h>YUq9M?jmt$1gO$g9A&6r7O=H7SXdBOy7i zPJT&wAm%^$-@In34x>KM-QH;K12HFc&2KwL0 zNx@+cMndv?A)lZcV#JyNO?mN*Aq)eT&J6lk=$R4+osN%8iYAfc1U@`2VmAud8Pn1G z((&M;ZIfWDJ7&SMsWFczY)VEda$Q9F-|(>otAss|4osh-w4?_Vr8TTwuy>06--iiC zg+T#JA*=yp7IdGFci@Mx zONoWf&Kh1HL{xd?lC9C0g65bSs@i}mHCYQ)ZPBneuf=8(>a8XxK^2{a1azTp7P2tJ z5?Mweh{Cip%7>y{4))5lES$;LwA?MycFs zinv5=WKTI+S}TyHwbjsV+PEQz>o%>eMzX@a3VkI}*4SzdCOEd)G;w*D&KB&ZGr&3M zo^atzVyCIiq|aJvpiXVBJg8;1y4q3`cQjv#<{L|rDlYy)K0mc2k!_~cvdvU+QACEZ z3`tsT>x|}G4M}3R!xl+f*hfTfjq4;i2#$Sk9Q8+m!t z@Y7B%=V8D4?hU_?*u)ly)j%+68vht}5Lh2&KzVWO%IioIWs&E&5RsdOekrG1iR5J@ zbii5AaBx;ybAN2sjX@pxhl*_3c~qF|P8RF4AvzQJcu}Af8wRlJf#RF4$V~k;czLWt zuG5o?3>||m>nUpb-|Cq}0aaVYtqAT(p-2w}J--qG0>2TFY{^iU-le<}Y0pi|+@?Zf zerXcbJU11+j?aeDCfw?hJOe5}0u6O!#bhc=c2 z#pME%JE$%P03x!nOb#kc*VNEh|LM+q&8Q~Nd@l0oHB-u=s%$7g9h&8x|1Vn zLmoaK`-lwW$W@JuT6ttNVPt9#UsP=@e2ssJpdUnmRv~x!xJ2^q@rb18w; z%J%PG__BS)n)=oAclW;BD{t7ad1&Sz_sy8w)O6FCdG)i7t-aRyapp?y&Bmc6pZ;|2 z_oFAj8ohk_>N9&sUq9QG`|!|=^s>Ga?|+nB@z}v-caI)8+%(c#l9b#=gyuwQO4OL+Z?DsYm}jd2!a|8>5{3 E515qaEdT%j diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/ec0bd970-e5ef-4843-abcb-e96a35a8f14d-m0.avro b/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/ec0bd970-e5ef-4843-abcb-e96a35a8f14d-m0.avro deleted file mode 100644 index 1b51cd60d136a9ee77b1f399ab799f586b7eabdd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6036 zcmb_gU2GIp6c(&X2sDkh#>5JffshclxHD7ek5WJdDQXc51}Sy9JA1bsJ3q_JY)QdY z6n#O#DC&d6M4!;a5>zCLKMA3lCoipXogw-lU_2sP~zDMw6l zT%G)qGRWPY#l&9pBIXY#e1inXe3Z{$i5D=?n=6D>h2VqwCv+!Ehaw++!3=a|tt9ln zla_+R9*l(K_d`BGHN=QD0h;pS89OlyTp~H-W1(jX9Bg!aWKuMV94GMMaS^*&z)qTu zK9Gn87j2gcwz^|xEt?wih{C31q$1Zvr2h>cTd+#l^WuRSP?VO$prW*fwF~x6zW)a> z!6-8*U@3$(h|DZ+?qE0y>ZY{jNF`0D&(M*%G1!0>%E&n~Lv&p|h%%&3L57l%l6(h# z2)mS6=N3)2EmcsbE-OE%CAPZSQWbYJUy0@$Ns=lq{%j^Qy(E!srd6}eRB=%h z4PyzCwCdIw&9@Sg#BPTzlCZFkh~ApeNpcVz``)a!b$2L3B@@_5p`8Su=VWAEG;tew z8Po6+PCD&jzw+)4zmV9(7KqhAFlrkA7R zWh8XNS2oU&J1NWVH0>PMTRSl@$GzxGH-I;9iz8FR))q$p0ue9Ho_^ z6kP7?!ugshK=Yypy-1p?&PYI zBWgn)J|Fuc8OV_<8X2|x$Y{dIR3E;m+F1A+{}Mqzhytxl_%aJ1STT`Gjl1C=pkh&0 z#t++$rs0u)Ulpk__5fB){+b@1;D7}cEuq?2c+Gr86bm$tR#3(KZVoso6lx&7;HQy& zXsNQQ>FK@ZK;QR0XHRVCyKVQWpSO(Heps`&X86VBT_f?j=k_V1kJKGrxuSK&;CcU{ zGbd-i-mveFA3GPWeWvrMHPZ0W4fnlu&)WLN-7oLI{-s!tbg}#5-qEp7N5^xwocpX} zdB@pvXD{7x>rr#b+~K1ezHIOpzp?n>e?OhsI{5zY4Z{~s*LMwW>K^-H>9ON)Ur4_C z-L@M?-aN4R*P(4sbm;P#d@3H6C?D?v7(Vkg<*KK+9she_pzuEf13(K!qn_t=WZO{0hb?B~x^Zs36 z&ziHbe%-u7PduAid}Jk-)TUMUpL+M$qRyHt*Imx_?fXRf_|WISoPVe3M8`ktMxNX< KJlZjVm;V92BEpm6vvw=2l}CKpj1F}7)*;=DXHxJ^6m;mKrJExM3oIFQVGkfJ7InhZ zrE*@D+vDA_Eo3i<(Je=aJ?mg-0h%E2%vTUFZBSdn$cxjB0xLq`CUmh$^Qn4Gq*V!- zC2rc7WnCdJ!PHUUdq@Y^0WH_Wo%v>juu#{LUiEC)^y&^4!=8|A-?B*HAoKb7SSry{ 
z3c+=VK@frbL?AwSnhY|aZ(*rSV?=il6_xFx>kz}>w!Et5_(3t@m9P|Og)DuugS=KEi&9N~wZLb=2N+hJ3<5&pVv<(6!&AL8maRWemDeU<= z^Z*2O6JU9QMQHh*omvtq;;vYY>e^qAj*rVHLm8 zfJAz<6a=+8Kh7=dkyYU!!>Qvst-gv+4xLc)bbSiNE8=`(icb$?Vo%ExCt9ra^tf2& zX=yofJ$nNy)EpgYqojo_h|#`yhLan6Fqg^5Y9=dZ^4WY=Ru#3Nj*RCT#vZO zurg+9sD|8y$Wbrd$MjE((ATJ?gkBr8fSk~O}u401iu= zC8Q|b6y~jcIO)8WBQgxZ#*yH%eIWnux*U*YfUxu9y9)i50}N<+{J0Wjr14!2%yj^J zSWiEySXZ~zazvgXn8vnAVS*Dqor4Mt6pL${oLFhSF)G8bCZH3#&oMEvSmcX>EbADT z?15mbOjd%ChAm!VXl`=hw-=`;gcQ5ob{v)+tWxZO4ZM|8lMR^`+rwiyl}wW8OFe0) zeW9jJ?1p+v@G+6bS2>IikEDeT4D0Wu3Af~N>3tC!4U^u&GkLCj3iYr@9yoa=i5byl zlRP+|KuoO(i%J!r303eaj^HyV*6HAcSfV0ezI}iwl75TCV>7V0Tvk;|sw`)UYC+D* zrINDa&hfd;%LkudIseZue>^;SLms&`eBt`U=-TAokr(S%u4|jSZ=)w}{JPpuKb*gK z2{h*p)R({h^4{ZjjO?0TQ;I_ub`C#nysbR2x^gY^&Ry%v#UIXn^WBW`(P8DxLF4X) zl|zRw&z-D%dL}>prvKK(52pWq=$@VLU(;_6A3G0T|7yqC-J|FB$gj%Jy>RIM)@Jc@ z0 z(`{Ms+M8LIG`kHh7O7e!2l3#ayBBYQpcfGj9_vj|JUH|I%*@+A@ojGVXXf|${bqjO z-*?`uynE){9BfkZTHw@B9scw3eS@_c$XGEbatL*edBcZw@x2axTtxx1tJp*Q=PE{? z8yG8h?vJc!Kvv@`l0X5Y*x%vbUE=UK>%@T!6Cb>Fa}DfaRs+{IKtz{OuVB|$Su)M` z{Ag?s`8#@ak7%)P5e7XV5(ELhhXD3LrNvN?rkez|w7?BW85a52K?6B9C(FdK&t%pc z{7)nfDy1qF&oZzK?@6A7rA!e!$wdeOb>g<@I%kzfuqc~(_`64mI?@~K?b4- z{38PC$%|xA6^0%&Rz!^C4w9l4N9ZnOFuJY4c1Rc%6JLo;K~yL+ctfNNkk4ofr0Y-M z6*L~Yivz|l*`5-4HQAS+x};HERe-qXiIHb*hW~dLoPj03W82e z9p{Plm__X%MD%vcw(gDQyb zB#!#kOH%*b2t!9$O6;{!3z!-GC%!l8xrw7v^j?T5;_k>pp>zNlQ#h<>mY8CVQlz&| z;N0**=|m{@x!nkF zi5?Sqd{x2J@W@)|)Ue53nsG}NSKJr3(KzW7JX7VWrqCFBRDrWsl9>@BHmQQE2_)5; zv8Y_}rBF4mQV2D3Qk_muNF}NRw%f08MT)mbIySi@&5GCKa}R&}vw6FD{@>U4fBogh z{*51I-kdfB!n7BuWM~4RoU+ng}{l~kV3O?T5?{psTe$cPYu0DBiZEfz` oZ_eI(fUf@i{O>Ow{&DH+FaP@a^JlOAv_8Ln?w#2uXNFVy4|K>7D*ylh diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/version-hint.text b/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/version-hint.text deleted file mode 100755 index d8263ee986059..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/datahub/integration/profiling/metadata/version-hint.text +++ /dev/null @@ -1 +0,0 @@ -2 \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/iceberg_mces_golden.json b/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/iceberg_mces_golden.json deleted file mode 100644 index edfa8f80670cf..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/profiling_test/iceberg_mces_golden.json +++ /dev/null @@ -1,129 +0,0 @@ -[ -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,datahub.integration.profiling,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.Status": { - "removed": false - } - }, - { - "com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": { - "owner": "root", - "provider": "ICEBERG", - "location": "/home/iceberg/warehouse/datahub/integration/profiling", - "snapshot-id": "4220723025353071767", - "manifest-list": "/home/iceberg/warehouse/datahub/integration/profiling/metadata/snap-4220723025353071767-1-ec0bd970-e5ef-4843-abcb-e96a35a8f14d.avro" - }, - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:root", - "type": "TECHNICAL_OWNER" - }, - { - "owner": "urn:li:corpGroup:root", - "type": "TECHNICAL_OWNER" - } - ], - "lastModified": { - "time": 0, - "actor": 
"urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "datahub.integration.profiling", - "platform": "urn:li:dataPlatform:iceberg", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "Schema(1: field_int: optional long(An integer field),2: field_str: optional string(A string field),3: field_timestamp: optional timestamptz(A timestamp field))" - } - }, - "fields": [ - { - "fieldPath": "[version=2.0].[type=struct].[type=long].field_int", - "nullable": true, - "description": "An integer field", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.NumberType": {} - } - }, - "nativeDataType": "long", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"long\", \"_nullable\": true}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=string].field_str", - "nullable": true, - "description": "A string field", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=long].field_timestamp", - "nullable": true, - "description": "A timestamp field", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "timestamptz", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"logicalType\": \"timestamp-micros\", \"native_data_type\": \"timestamptz\", \"_nullable\": true}" - } - ] - } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "iceberg-test" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,datahub.integration.profiling,PROD)", - "changeType": "UPSERT", - "aspectName": "datasetProfile", - "aspect": { - "value": "{\"timestampMillis\": 1586847600000, \"partitionSpec\": {\"type\": \"FULL_TABLE\", \"partition\": \"FULL_TABLE_SNAPSHOT\"}, \"rowCount\": 4, \"columnCount\": 3, \"fieldProfiles\": [{\"fieldPath\": \"field_int\", \"nullCount\": 0, \"nullProportion\": 0.0, \"min\": \"1\", \"max\": \"4\"}, {\"fieldPath\": \"field_str\", \"nullCount\": 0, \"nullProportion\": 0.0}, {\"fieldPath\": \"field_timestamp\", \"nullCount\": 2, \"nullProportion\": 0.5, \"min\": \"2022-05-03 21:42:29\", \"max\": \"2022-05-03 21:44:04\"}]}", - "contentType": "application/json" - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "iceberg-test" - } -} -] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/iceberg_deleted_table_mces_golden.json b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/iceberg_deleted_table_mces_golden.json deleted file mode 100644 index d376d8b645d66..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/iceberg_deleted_table_mces_golden.json +++ /dev/null @@ -1,159 +0,0 @@ -[ -{ - "proposedSnapshot": { - "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.namespace.iceberg_test,PROD)", - "aspects": [ - { - "com.linkedin.pegasus2avro.common.Status": { - "removed": false - } - }, - { - 
"com.linkedin.pegasus2avro.dataset.DatasetProperties": { - "customProperties": { - "owner": "new_owner", - "provider": "ICEBERG", - "location": "/namespace/iceberg_test" - }, - "tags": [] - } - }, - { - "com.linkedin.pegasus2avro.common.Ownership": { - "owners": [ - { - "owner": "urn:li:corpuser:new_owner", - "type": "TECHNICAL_OWNER" - }, - { - "owner": "urn:li:corpGroup:new_owner", - "type": "TECHNICAL_OWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - { - "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "namespace.iceberg_test", - "platform": "urn:li:dataPlatform:iceberg", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.pegasus2avro.schema.OtherSchema": { - "rawSchema": "Schema(1: level: required string(level documentation),2: event_time: required timestamptz(event_time documentation),3: message: required string(message documentation),4: call_stack: optional list(call_stack documentation))" - } - }, - "fields": [ - { - "fieldPath": "[version=2.0].[type=struct].[type=string].level", - "nullable": false, - "description": "level documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": false}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=long].event_time", - "nullable": false, - "description": "event_time documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.TimeType": {} - } - }, - "nativeDataType": "timestamptz", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"logicalType\": \"timestamp-micros\", \"native_data_type\": \"timestamptz\", \"_nullable\": false}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=string].message", - "nullable": false, - "description": "message documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": false}" - }, - { - "fieldPath": "[version=2.0].[type=struct].[type=array].[type=string].call_stack", - "nullable": true, - "description": "call_stack documentation", - "type": { - "type": { - "com.linkedin.pegasus2avro.schema.ArrayType": { - "nestedType": [ - "string" - ] - } - } - }, - "nativeDataType": "list", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"list\", \"_nullable\": true}" - } - ] - } - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "iceberg-2020_04_14-07_00_00" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.namespace.iceberg_test,PROD)", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "value": "{\"platform\": \"urn:li:dataPlatform:iceberg\", \"instance\": \"urn:li:dataPlatformInstance:(urn:li:dataPlatform:iceberg,test_platform_instance)\"}", - "contentType": "application/json" - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "iceberg-2020_04_14-07_00_00" - } -}, -{ - "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.namespace.iceberg_test_2,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "value": "{\"removed\": true}", - "contentType": "application/json" - }, - "systemMetadata": { - "lastObserved": 1586847600000, - "runId": "iceberg-2020_04_14-07_00_00" - } -} -] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v1.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v1.metadata.json deleted file mode 100644 index e4ac0b9685ddc..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v1.metadata.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "11bbe5de-5ef6-4074-80db-f041065f9862", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1648729616724, - "last-column-id" : 5, - "schema" : { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { }, - "current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v2.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v2.metadata.json deleted file mode 100644 index 02221330b0665..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/v2.metadata.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "16e6ecee-cd5d-470f-a7a6-a197944fa4db", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1649086837695, - "last-column-id" : 5, - "schema" : { - "type" 
: "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { - "owner" : "new_owner" - }, - "current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ { - "timestamp-ms" : 1649086837511, - "metadata-file" : "/namespace/iceberg_test/metadata/v1.metadata.json" - } ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/version-hint.text b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/version-hint.text deleted file mode 100644 index d8263ee986059..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test/metadata/version-hint.text +++ /dev/null @@ -1 +0,0 @@ -2 \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v1.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v1.metadata.json deleted file mode 100644 index e4ac0b9685ddc..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v1.metadata.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "11bbe5de-5ef6-4074-80db-f041065f9862", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1648729616724, - "last-column-id" : 5, - "schema" : { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, 
{ - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { }, - "current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v2.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v2.metadata.json deleted file mode 100644 index 02221330b0665..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/v2.metadata.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "16e6ecee-cd5d-470f-a7a6-a197944fa4db", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1649086837695, - "last-column-id" : 5, - "schema" : { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 
5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { - "owner" : "new_owner" - }, - "current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ { - "timestamp-ms" : 1649086837511, - "metadata-file" : "/namespace/iceberg_test/metadata/v1.metadata.json" - } ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/version-hint.text b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/version-hint.text deleted file mode 100644 index d8263ee986059..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run1/namespace/iceberg_test_2/metadata/version-hint.text +++ /dev/null @@ -1 +0,0 @@ -2 \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v1.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v1.metadata.json deleted file mode 100644 index e4ac0b9685ddc..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v1.metadata.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "11bbe5de-5ef6-4074-80db-f041065f9862", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1648729616724, - "last-column-id" : 5, - "schema" : { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - } - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { 
- "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { }, - "current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v2.metadata.json b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v2.metadata.json deleted file mode 100644 index 02221330b0665..0000000000000 --- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/v2.metadata.json +++ /dev/null @@ -1,118 +0,0 @@ -{ - "format-version" : 1, - "table-uuid" : "16e6ecee-cd5d-470f-a7a6-a197944fa4db", - "location" : "/namespace/iceberg_test", - "last-updated-ms" : 1649086837695, - "last-column-id" : 5, - "schema" : { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - }, - "current-schema-id" : 0, - "schemas" : [ { - "type" : "struct", - "schema-id" : 0, - "fields" : [ { - "id" : 1, - "name" : "level", - "required" : true, - "type" : "string", - "doc" : "level documentation" - }, { - "id" : 2, - "name" : "event_time", - "required" : true, - "type" : "timestamptz", - "doc" : "event_time documentation" - }, { - "id" : 3, - "name" : "message", - "required" : true, - "type" : "string", - "doc" : "message documentation" - }, { - "id" : 4, - "name" : "call_stack", - "required" : false, - "type" : { - "type" : "list", - "element-id" : 5, - "element" : "string", - "element-required" : true - }, - "doc" : "call_stack documentation" - } ] - } ], - "partition-spec" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ], - "default-spec-id" : 0, - "partition-specs" : [ { - "spec-id" : 0, - "fields" : [ { - "name" : "event_time_hour", - "transform" : "hour", - "source-id" : 2, - "field-id" : 1000 - }, { - "name" : "level", - "transform" : "identity", - "source-id" : 1, - "field-id" : 1001 - } ] - } ], - "last-partition-id" : 1001, - "default-sort-order-id" : 0, - "sort-orders" : [ { - "order-id" : 0, - "fields" : [ ] - } ], - "properties" : { - "owner" : "new_owner" - }, - "current-snapshot-id" : -1, - "snapshots" : [ ], - "snapshot-log" : [ ], - "metadata-log" : [ { - "timestamp-ms" : 1649086837511, - "metadata-file" : "/namespace/iceberg_test/metadata/v1.metadata.json" - } ] -} \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/version-hint.text 
b/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/version-hint.text
deleted file mode 100644
index d8263ee986059..0000000000000
--- a/metadata-ingestion/tests/integration/iceberg/test_data/stateful_test/run2/namespace/iceberg_test/metadata/version-hint.text
+++ /dev/null
@@ -1 +0,0 @@
-2
\ No newline at end of file
diff --git a/metadata-ingestion/tests/integration/iceberg/test_iceberg.py b/metadata-ingestion/tests/integration/iceberg/test_iceberg.py
index b26b574e54c47..e2a86480672e5 100644
--- a/metadata-ingestion/tests/integration/iceberg/test_iceberg.py
+++ b/metadata-ingestion/tests/integration/iceberg/test_iceberg.py
@@ -1,14 +1,14 @@
-from pathlib import PosixPath
-from typing import Any, Dict, Union
+import subprocess
+import sys
+from typing import Any, Dict, List
 from unittest.mock import patch
 
 import pytest
 from freezegun import freeze_time
-from iceberg.core.filesystem.file_status import FileStatus
-from iceberg.core.filesystem.local_filesystem import LocalFileSystem
 
-from datahub.ingestion.run.pipeline import Pipeline
 from tests.test_helpers import mce_helpers
+from tests.test_helpers.click_helpers import run_datahub_cmd
+from tests.test_helpers.docker_helpers import wait_for_port
 from tests.test_helpers.state_helpers import (
     get_current_checkpoint_from_pipeline,
     run_and_get_pipeline,
@@ -20,89 +20,92 @@
 
 GMS_SERVER = f"http://localhost:{GMS_PORT}"
 
 
+@pytest.fixture(autouse=True)
+def skip_tests_if_python_before_3_8():
+    if sys.version_info < (3, 8):
+        pytest.skip("Requires python 3.8 or higher")
+
+
+def spark_submit(file_path: str, args: str = "") -> None:
+    docker = "docker"
+    command = f"{docker} exec spark-iceberg spark-submit {file_path} {args}"
+    ret = subprocess.run(
+        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
     )
+    assert ret.returncode == 0
+
+
 @freeze_time(FROZEN_TIME)
 @pytest.mark.integration
-def test_iceberg_ingest(pytestconfig, tmp_path, mock_time):
+def test_iceberg_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time):
     test_resources_dir = pytestconfig.rootpath / "tests/integration/iceberg/"
 
-    # Run the metadata ingestion pipeline.
-    pipeline = Pipeline.create(
-        {
-            "run_id": "iceberg-test",
-            "source": {
-                "type": "iceberg",
-                "config": {
-                    "localfs": str(test_resources_dir / "test_data/ingest_test"),
-                    "user_ownership_property": "owner",
-                    "group_ownership_property": "owner",
-                },
-            },
-            "sink": {
-                "type": "file",
-                "config": {
-                    "filename": f"{tmp_path}/iceberg_mces.json",
-                },
-            },
-        }
-    )
-    pipeline.run()
-    pipeline.raise_from_status()
-
-    # Verify the output.
-    mce_helpers.check_golden_file(
-        pytestconfig,
-        output_path=tmp_path / "iceberg_mces.json",
-        golden_path=test_resources_dir
-        / "test_data/ingest_test/iceberg_mces_golden.json",
-    )
+    with docker_compose_runner(
+        test_resources_dir / "docker-compose.yml", "iceberg"
+    ) as docker_services:
+        wait_for_port(docker_services, "spark-iceberg", 8888, timeout=120)
+
+        # Run the create.py pyspark file to populate the table.
+        spark_submit("/home/iceberg/setup/create.py", "nyc.taxis")
+
+        # Run the metadata ingestion pipeline.
+        config_file = (test_resources_dir / "iceberg_to_file.yml").resolve()
+        run_datahub_cmd(
+            ["ingest", "--strict-warnings", "-c", f"{config_file}"], tmp_path=tmp_path
+        )
+        # These paths change from one instance run of the Iceberg docker to the other, and the FROZEN_TIME does not apply to these.
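+        # The entries below are deepdiff-style exclude paths; created-at, snapshot-id and
+        # manifest-list are regenerated on every table write, so they cannot be compared
+        # against a golden file.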
+        ignore_paths: List[str] = [
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['created-at'\]",
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['snapshot-id'\]",
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['manifest-list'\]",
+        ]
+        # Verify the output.
+        mce_helpers.check_golden_file(
+            pytestconfig,
+            ignore_paths=ignore_paths,
+            output_path=tmp_path / "iceberg_mces.json",
+            golden_path=test_resources_dir / "iceberg_ingest_mces_golden.json",
+        )
 
 
 @freeze_time(FROZEN_TIME)
 @pytest.mark.integration
-def test_iceberg_stateful_ingest(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
-    test_resources_dir = (
-        pytestconfig.rootpath / "tests/integration/iceberg/test_data/stateful_test"
-    )
+def test_iceberg_stateful_ingest(
+    docker_compose_runner, pytestconfig, tmp_path, mock_time, mock_datahub_graph
+):
+    test_resources_dir = pytestconfig.rootpath / "tests/integration/iceberg"
 
     platform_instance = "test_platform_instance"
 
-    scd_before_deletion: Dict[str, Any] = {
-        "localfs": str(test_resources_dir / "run1"),
-        "user_ownership_property": "owner",
-        "group_ownership_property": "owner",
-        "platform_instance": f"{platform_instance}",
-        # enable stateful ingestion
-        "stateful_ingestion": {
-            "enabled": True,
-            "remove_stale_metadata": True,
-            "fail_safe_threshold": 100.0,
-            "state_provider": {
-                "type": "datahub",
-                "config": {"datahub_api": {"server": GMS_SERVER}},
-            },
-        },
-    }
-
-    scd_after_deletion: Dict[str, Any] = {
-        "localfs": str(test_resources_dir / "run2"),
-        "user_ownership_property": "owner",
-        "group_ownership_property": "owner",
-        "platform_instance": f"{platform_instance}",
-        # enable stateful ingestion
-        "stateful_ingestion": {
-            "enabled": True,
-            "remove_stale_metadata": True,
-            "fail_safe_threshold": 100.0,
-            "state_provider": {
-                "type": "datahub",
-                "config": {"datahub_api": {"server": GMS_SERVER}},
-            },
-        },
-    }
-
     pipeline_config_dict: Dict[str, Any] = {
         "source": {
             "type": "iceberg",
-            "config": scd_before_deletion,
+            "config": {
+                "catalog": {
+                    "name": "default",
+                    "type": "rest",
+                    "config": {
+                        "uri": "http://localhost:8181",
+                        "s3.access-key-id": "admin",
+                        "s3.secret-access-key": "password",
+                        "s3.region": "us-east-1",
+                        "warehouse": "s3a://warehouse/wh/",
+                        "s3.endpoint": "http://localhost:9000",
+                    },
+                },
+                "user_ownership_property": "owner",
+                "group_ownership_property": "owner",
+                "platform_instance": f"{platform_instance}",
+                # enable stateful ingestion
+                "stateful_ingestion": {
+                    "enabled": True,
+                    "remove_stale_metadata": True,
+                    "fail_safe_threshold": 100.0,
+                    "state_provider": {
+                        "type": "datahub",
+                        "config": {"datahub_api": {"server": GMS_SERVER}},
+                    },
+                },
+            },
        },
         "sink": {
             # we are not really interested in the resulting events for this test
@@ -111,10 +114,18 @@
         "pipeline_name": "test_pipeline",
     }
 
-    with patch(
+    with docker_compose_runner(
+        test_resources_dir / "docker-compose.yml", "iceberg"
+    ) as docker_services, patch(
         "datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider.DataHubGraph",
         mock_datahub_graph,
     ) as mock_checkpoint:
+        wait_for_port(docker_services, "spark-iceberg", 8888, timeout=120)
+
+        # Run the create.py pyspark file to populate two tables.
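+        # create.py is executed inside the spark-iceberg container via spark_submit, so the
+        # paths below are container paths; only the first table is dropped later, which is
+        # what the stale-entity assertions below rely on.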
+        spark_submit("/home/iceberg/setup/create.py", "nyc.taxis")
+        spark_submit("/home/iceberg/setup/create.py", "nyc.another_taxis")
+
         # Both checkpoint and reporting will use the same mocked graph instance.
         mock_checkpoint.return_value = mock_datahub_graph
 
@@ -125,13 +136,14 @@
         assert checkpoint1
         assert checkpoint1.state
 
-        # Set iceberg config where a table is deleted.
-        pipeline_config_dict["source"]["config"] = scd_after_deletion
         # Capture MCEs of second run to validate Status(removed=true)
         deleted_mces_path = f"{tmp_path}/iceberg_deleted_mces.json"
         pipeline_config_dict["sink"]["type"] = "file"
         pipeline_config_dict["sink"]["config"] = {"filename": deleted_mces_path}
 
+        # Run the delete.py pyspark file to delete the table.
+        spark_submit("/home/iceberg/setup/delete.py")
+
         # Do the second run of the pipeline.
         pipeline_run2 = run_and_get_pipeline(pipeline_config_dict)
         checkpoint2 = get_current_checkpoint_from_pipeline(pipeline_run2)
@@ -149,7 +161,7 @@
 
         assert len(difference_urns) == 1
 
-        urn1 = "urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.namespace.iceberg_test_2,PROD)"
+        urn1 = "urn:li:dataset:(urn:li:dataPlatform:iceberg,test_platform_instance.nyc.taxis,PROD)"
 
         assert urn1 in difference_urns
 
@@ -161,9 +173,16 @@
             pipeline=pipeline_run2, expected_providers=1
         )
 
+        ignore_paths: List[str] = [
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['created-at'\]",
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['snapshot-id'\]",
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['manifest-list'\]",
+        ]
+
         # Verify the output.
         mce_helpers.check_golden_file(
             pytestconfig,
+            ignore_paths=ignore_paths,
             output_path=deleted_mces_path,
             golden_path=test_resources_dir / "iceberg_deleted_table_mces_golden.json",
         )
@@ -171,117 +190,32 @@
 
 @freeze_time(FROZEN_TIME)
 @pytest.mark.integration
-def test_iceberg_profiling(pytestconfig, tmp_path, mock_time):
-    """
-    This test is using a table created using https://github.com/tabular-io/docker-spark-iceberg.
-    Here are the DDL statements that you can execute with `spark-sql`:
-    ```SQL
-    CREATE TABLE datahub.integration.profiling (
-        field_int bigint COMMENT 'An integer field',
-        field_str string COMMENT 'A string field',
-        field_timestamp timestamp COMMENT 'A timestamp field')
-    USING iceberg;
-
-    INSERT INTO datahub.integration.profiling VALUES (1, 'row1', current_timestamp()), (2, 'row2', null);
-    INSERT INTO datahub.integration.profiling VALUES (3, 'row3', current_timestamp()), (4, 'row4', null);
-    ```
-
-    When importing the metadata files into this test, we need to create a `version-hint.text` with a value that
-    reflects the version of the table, and then change the code in `TestLocalFileSystem._replace_path()` accordingly.
-    """
-    test_resources_dir = (
-        pytestconfig.rootpath / "tests/integration/iceberg/test_data/profiling_test"
-    )
+def test_iceberg_profiling(docker_compose_runner, pytestconfig, tmp_path, mock_time):
+    test_resources_dir = pytestconfig.rootpath / "tests/integration/iceberg/"
 
-    # Run the metadata ingestion pipeline.
-    pipeline = Pipeline.create(
-        {
-            "run_id": "iceberg-test",
-            "source": {
-                "type": "iceberg",
-                "config": {
-                    "localfs": str(test_resources_dir),
-                    "user_ownership_property": "owner",
-                    "group_ownership_property": "owner",
-                    "max_path_depth": 3,
-                    "profiling": {
-                        "enabled": True,
-                    },
-                    "table_pattern": {"allow": ["datahub.integration.profiling"]},
-                },
-            },
-            "sink": {
-                "type": "file",
-                "config": {
-                    "filename": f"{tmp_path}/iceberg_mces.json",
-                },
-            },
-        }
-    )
+    with docker_compose_runner(
+        test_resources_dir / "docker-compose.yml", "iceberg"
+    ) as docker_services:
+        wait_for_port(docker_services, "spark-iceberg", 8888, timeout=120)
 
-    class TestLocalFileSystem(LocalFileSystem):
-        # This class acts as a wrapper on LocalFileSystem to intercept calls using a path location.
-        # The wrapper will normalize those paths to be usable by the test.
-        fs: LocalFileSystem
-
-        @staticmethod
-        def _replace_path(path: Union[str, PosixPath]) -> str:
-            # When the Iceberg table was created, its warehouse folder was '/home/iceberg/warehouse'. Iceberg tables
-            # are not portable, so we need to replace the warehouse folder by the test location at runtime.
-            normalized_path: str = str(path).replace(
-                "/home/iceberg/warehouse", str(test_resources_dir)
-            )
-
-            # When the Iceberg table was created, a postgres catalog was used instead of a HadoopCatalog. The HadoopCatalog
-            # expects a file named 'v{}.metadata.json' where {} is the version number from 'version-hint.text'. Since
-            # 'v2.metadata.json' does not exist, we will redirect the call to '00002-02782173-8364-4caf-a3c4-9567c1d6608f.metadata.json'.
-            if normalized_path.endswith("v2.metadata.json"):
-                return normalized_path.replace(
-                    "v2.metadata.json",
-                    "00002-cc241948-4c12-46d0-9a75-ce3578ec03d4.metadata.json",
-                )
-            return normalized_path
-
-        def __init__(self, fs: LocalFileSystem) -> None:
-            self.fs = fs
-
-        def open(self, path: str, mode: str = "rb") -> object:
-            return self.fs.open(TestLocalFileSystem._replace_path(path), mode)
-
-        def delete(self, path: str) -> None:
-            self.fs.delete(TestLocalFileSystem._replace_path(path))
-
-        def stat(self, path: str) -> FileStatus:
-            return self.fs.stat(TestLocalFileSystem._replace_path(path))
-
-        @staticmethod
-        def fix_path(path: str) -> str:
-            return TestLocalFileSystem.fs.fix_path(
-                TestLocalFileSystem._replace_path(path)
-            )
-
-        def create(self, path: str, overwrite: bool = False) -> object:
-            return self.fs.create(TestLocalFileSystem._replace_path(path), overwrite)
-
-        def rename(self, src: str, dest: str) -> bool:
-            return self.fs.rename(
-                TestLocalFileSystem._replace_path(src),
-                TestLocalFileSystem._replace_path(dest),
-            )
-
-        def exists(self, path: str) -> bool:
-            return self.fs.exists(TestLocalFileSystem._replace_path(path))
-
-    local_fs_wrapper: TestLocalFileSystem = TestLocalFileSystem(
-        LocalFileSystem.get_instance()
-    )
-    with patch.object(LocalFileSystem, "get_instance", return_value=local_fs_wrapper):
-        pipeline.run()
-        pipeline.raise_from_status()
-
-    # Verify the output.
-    mce_helpers.check_golden_file(
-        pytestconfig,
-        output_path=tmp_path / "iceberg_mces.json",
-        golden_path=test_resources_dir / "iceberg_mces_golden.json",
-    )
+        # Run the create.py pyspark file to populate the table.
+        spark_submit("/home/iceberg/setup/create.py", "nyc.taxis")
+
+        # Run the metadata ingestion pipeline.
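+        # Ingestion is driven through the datahub CLI here; iceberg_profile_to_file.yml is
+        # assumed to enable profiling and to point the file sink at the tmp_path output
+        # checked below.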
+        config_file = (test_resources_dir / "iceberg_profile_to_file.yml").resolve()
+        run_datahub_cmd(
+            ["ingest", "--strict-warnings", "-c", f"{config_file}"], tmp_path=tmp_path
+        )
+        # These paths change from one instance run of the Iceberg docker to the other, and the FROZEN_TIME does not apply to these.
+        ignore_paths: List[str] = [
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['created-at'\]",
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['snapshot-id'\]",
+            r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['customProperties'\]\['manifest-list'\]",
+        ]
+        # Verify the output.
+        mce_helpers.check_golden_file(
+            pytestconfig,
+            ignore_paths=ignore_paths,
+            output_path=tmp_path / "iceberg_mces.json",
+            golden_path=test_resources_dir / "iceberg_profile_mces_golden.json",
+        )
diff --git a/metadata-ingestion/tests/unit/test_iceberg.py b/metadata-ingestion/tests/unit/test_iceberg.py
index f3ea071d76400..768d4f958af1f 100644
--- a/metadata-ingestion/tests/unit/test_iceberg.py
+++ b/metadata-ingestion/tests/unit/test_iceberg.py
@@ -1,405 +1,482 @@
+import sys
+import uuid
+from decimal import Decimal
 from typing import Any, Optional
 
 import pytest
-from iceberg.api import types as IcebergTypes
-from iceberg.api.types.types import NestedField
-
-from datahub.configuration.common import ConfigurationError
-from datahub.ingestion.api.common import PipelineContext
-from datahub.ingestion.source.azure.azure_common import AdlsSourceConfig
-from datahub.ingestion.source.iceberg.iceberg import IcebergSource, IcebergSourceConfig
-from datahub.metadata.com.linkedin.pegasus2avro.schema import ArrayType, SchemaField
-from datahub.metadata.schema_classes import (
-    ArrayTypeClass,
-    BooleanTypeClass,
-    BytesTypeClass,
-    DateTypeClass,
-    FixedTypeClass,
-    NumberTypeClass,
-    RecordTypeClass,
-    StringTypeClass,
-    TimeTypeClass,
-)
-
+from pydantic import ValidationError
+
+if sys.version_info >= (3, 8):
+    from pyiceberg.schema import Schema
+    from pyiceberg.types import (
+        BinaryType,
+        BooleanType,
+        DateType,
+        DecimalType,
+        DoubleType,
+        FixedType,
+        FloatType,
+        IcebergType,
+        IntegerType,
+        ListType,
+        LongType,
+        MapType,
+        NestedField,
+        PrimitiveType,
+        StringType,
+        StructType,
+        TimestampType,
+        TimestamptzType,
+        TimeType,
+        UUIDType,
+    )
 
-def with_iceberg_source() -> IcebergSource:
-    adls: AdlsSourceConfig = AdlsSourceConfig(
-        account_name="test", account_key="test", container_name="test"
+    from datahub.ingestion.api.common import PipelineContext
+    from datahub.ingestion.source.iceberg.iceberg import (
+        IcebergProfiler,
+        IcebergSource,
+        IcebergSourceConfig,
     )
-    return IcebergSource(
-        ctx=PipelineContext(run_id="iceberg-source-test"),
-        config=IcebergSourceConfig(adls=adls),
+    from datahub.ingestion.source.iceberg.iceberg_common import IcebergCatalogConfig
+    from datahub.metadata.com.linkedin.pegasus2avro.schema import ArrayType, SchemaField
+    from datahub.metadata.schema_classes import (
+        ArrayTypeClass,
+        BooleanTypeClass,
+        BytesTypeClass,
+        DateTypeClass,
+        FixedTypeClass,
+        NumberTypeClass,
+        RecordTypeClass,
+        StringTypeClass,
+        TimeTypeClass,
     )
-
-def assert_field(
-    schema_field: SchemaField,
-    expected_description: Optional[str],
-    expected_nullable: bool,
-    expected_type: Any,
-) -> None:
-    assert (
-        schema_field.description == expected_description
-    ), f"Field description '{schema_field.description}' is different from expected description '{expected_description}'"
-    assert (
-        schema_field.nullable ==
expected_nullable - ), f"Field nullable '{schema_field.nullable}' is different from expected nullable '{expected_nullable}'" - assert isinstance( - schema_field.type.type, expected_type - ), f"Field type {schema_field.type.type} is different from expected type {expected_type}" - - -def test_adls_config_no_credential(): - """ - Test when no ADLS credential information is provided (SAS token, Account key). - """ - with pytest.raises(ConfigurationError): - AdlsSourceConfig(account_name="test", container_name="test") - - -def test_adls_config_with_sas_credential(): - """ - Test when a SAS token is used as an ADLS credential. - """ - AdlsSourceConfig(account_name="test", sas_token="test", container_name="test") - - -def test_adls_config_with_key_credential(): - """ - Test when an account key is used as an ADLS credential. - """ - AdlsSourceConfig(account_name="test", account_key="test", container_name="test") - - -def test_adls_config_with_client_secret_credential(): - """ - Test when a client secret is used as an ADLS credential. - """ - AdlsSourceConfig( - account_name="test", - tenant_id="test", - client_id="test", - client_secret="test", - container_name="test", + pytestmark = pytest.mark.skipif( + sys.version_info < (3, 8), reason="requires python 3.8 or higher" ) - # Test when tenant_id is missing - with pytest.raises(ConfigurationError): - AdlsSourceConfig( - account_name="test", - client_id="test", - client_secret="test", - container_name="test", + def with_iceberg_source() -> IcebergSource: + catalog: IcebergCatalogConfig = IcebergCatalogConfig( + name="test", type="rest", config={} ) - - # Test when client_id is missing - with pytest.raises(ConfigurationError): - AdlsSourceConfig( - account_name="test", - tenant_id="test", - client_secret="test", - container_name="test", - ) - - # Test when client_secret is missing - with pytest.raises(ConfigurationError): - AdlsSourceConfig( - account_name="test", - tenant_id="test", - client_id="test", - container_name="test", - ) - - -def test_config_for_tests(): - """ - Test valid iceberg source that will be used in unit tests. - """ - with_iceberg_source() - - -def test_config_no_filesystem(): - """ - Test when a SAS token is used as an ADLS credential. - """ - with pytest.raises(ConfigurationError): - IcebergSource( + return IcebergSource( ctx=PipelineContext(run_id="iceberg-source-test"), - config=IcebergSourceConfig(), + config=IcebergSourceConfig(catalog=catalog), ) - -def test_config_multiple_filesystems(): - """ - Test when more than 1 filesystem is configured. 
- """ - with pytest.raises(ConfigurationError): - adls: AdlsSourceConfig = AdlsSourceConfig( - account_name="test", container_name="test" - ) - IcebergSource( - ctx=PipelineContext(run_id="iceberg-source-test"), - config=IcebergSourceConfig(adls=adls, localfs="/tmp"), + def with_iceberg_profiler() -> IcebergProfiler: + iceberg_source_instance = with_iceberg_source() + return IcebergProfiler( + iceberg_source_instance.report, iceberg_source_instance.config.profiling ) - -@pytest.mark.parametrize( - "iceberg_type, expected_schema_field_type", - [ - (IcebergTypes.BinaryType.get(), BytesTypeClass), - (IcebergTypes.BooleanType.get(), BooleanTypeClass), - (IcebergTypes.DateType.get(), DateTypeClass), - ( - IcebergTypes.DecimalType.of(3, 2), - NumberTypeClass, - ), - (IcebergTypes.DoubleType.get(), NumberTypeClass), - (IcebergTypes.FixedType.of_length(4), FixedTypeClass), - (IcebergTypes.FloatType.get(), NumberTypeClass), - (IcebergTypes.IntegerType.get(), NumberTypeClass), - (IcebergTypes.LongType.get(), NumberTypeClass), - (IcebergTypes.StringType.get(), StringTypeClass), - ( - IcebergTypes.TimestampType.with_timezone(), - TimeTypeClass, - ), - ( - IcebergTypes.TimestampType.without_timezone(), - TimeTypeClass, - ), - (IcebergTypes.TimeType.get(), TimeTypeClass), - ( - IcebergTypes.UUIDType.get(), - StringTypeClass, - ), - ], -) -def test_iceberg_primitive_type_to_schema_field( - iceberg_type: IcebergTypes.PrimitiveType, expected_schema_field_type: Any -) -> None: - """ - Test converting a primitive typed Iceberg field to a SchemaField - """ - iceberg_source_instance = with_iceberg_source() - for column in [ - NestedField.required( - 1, "required_field", iceberg_type, "required field documentation" - ), - NestedField.optional( - 1, "optional_field", iceberg_type, "optional field documentation" - ), - ]: - schema_fields = iceberg_source_instance._get_schema_fields_for_column(column) + def assert_field( + schema_field: SchemaField, + expected_description: Optional[str], + expected_nullable: bool, + expected_type: Any, + ) -> None: assert ( - len(schema_fields) == 1 - ), f"Expected 1 field, but got {len(schema_fields)}" - assert_field( - schema_fields[0], column.doc, column.is_optional, expected_schema_field_type - ) - - -@pytest.mark.parametrize( - "iceberg_type, expected_array_nested_type", - [ - (IcebergTypes.BinaryType.get(), "bytes"), - (IcebergTypes.BooleanType.get(), "boolean"), - (IcebergTypes.DateType.get(), "date"), - ( - IcebergTypes.DecimalType.of(3, 2), - "decimal", - ), - (IcebergTypes.DoubleType.get(), "double"), - (IcebergTypes.FixedType.of_length(4), "fixed"), - (IcebergTypes.FloatType.get(), "float"), - (IcebergTypes.IntegerType.get(), "int"), - (IcebergTypes.LongType.get(), "long"), - (IcebergTypes.StringType.get(), "string"), - ( - IcebergTypes.TimestampType.with_timezone(), - "timestamp-micros", - ), - ( - IcebergTypes.TimestampType.without_timezone(), - "timestamp-micros", - ), - (IcebergTypes.TimeType.get(), "time-micros"), - ( - IcebergTypes.UUIDType.get(), - "uuid", - ), - ], -) -def test_iceberg_list_to_schema_field( - iceberg_type: IcebergTypes.PrimitiveType, expected_array_nested_type: Any -) -> None: - """ - Test converting a list typed Iceberg field to an ArrayType SchemaField, including the list nested type. 
- """ - list_column: NestedField = NestedField.required( - 1, - "listField", - IcebergTypes.ListType.of_required(2, iceberg_type), - "documentation", + schema_field.description == expected_description + ), f"Field description '{schema_field.description}' is different from expected description '{expected_description}'" + assert ( + schema_field.nullable == expected_nullable + ), f"Field nullable '{schema_field.nullable}' is different from expected nullable '{expected_nullable}'" + assert isinstance( + schema_field.type.type, expected_type + ), f"Field type {schema_field.type.type} is different from expected type {expected_type}" + + def test_config_no_catalog(): + """ + Test when no Iceberg catalog is provided. + """ + with pytest.raises(ValidationError, match="catalog"): + IcebergSourceConfig() # type: ignore + + def test_config_catalog_not_configured(): + """ + Test when an Iceberg catalog is provided, but not properly configured. + """ + with pytest.raises(ValidationError): + IcebergCatalogConfig() # type: ignore + + with pytest.raises(ValidationError, match="conf"): + IcebergCatalogConfig(type="a type") # type: ignore + + with pytest.raises(ValidationError, match="type"): + IcebergCatalogConfig(conf={}) # type: ignore + + def test_config_for_tests(): + """ + Test valid iceberg source that will be used in unit tests. + """ + with_iceberg_source() + + @pytest.mark.parametrize( + "iceberg_type, expected_schema_field_type", + [ + (BinaryType(), BytesTypeClass), + (BooleanType(), BooleanTypeClass), + (DateType(), DateTypeClass), + ( + DecimalType(3, 2), + NumberTypeClass, + ), + (DoubleType(), NumberTypeClass), + (FixedType(4), FixedTypeClass), + (FloatType(), NumberTypeClass), + (IntegerType(), NumberTypeClass), + (LongType(), NumberTypeClass), + (StringType(), StringTypeClass), + ( + TimestampType(), + TimeTypeClass, + ), + ( + TimestamptzType(), + TimeTypeClass, + ), + (TimeType(), TimeTypeClass), + ( + UUIDType(), + StringTypeClass, + ), + ], ) - iceberg_source_instance = with_iceberg_source() - schema_fields = iceberg_source_instance._get_schema_fields_for_column(list_column) - assert len(schema_fields) == 1, f"Expected 1 field, but got {len(schema_fields)}" - assert_field( - schema_fields[0], list_column.doc, list_column.is_optional, ArrayTypeClass + def test_iceberg_primitive_type_to_schema_field( + iceberg_type: PrimitiveType, expected_schema_field_type: Any + ) -> None: + """ + Test converting a primitive typed Iceberg field to a SchemaField + """ + iceberg_source_instance = with_iceberg_source() + for column in [ + NestedField( + 1, "required_field", iceberg_type, True, "required field documentation" + ), + NestedField( + 1, "optional_field", iceberg_type, False, "optional field documentation" + ), + ]: + schema = Schema(column) + schema_fields = iceberg_source_instance._get_schema_fields_for_schema( + schema + ) + assert ( + len(schema_fields) == 1 + ), f"Expected 1 field, but got {len(schema_fields)}" + assert_field( + schema_fields[0], + column.doc, + column.optional, + expected_schema_field_type, + ) + + @pytest.mark.parametrize( + "iceberg_type, expected_array_nested_type", + [ + (BinaryType(), "bytes"), + (BooleanType(), "boolean"), + (DateType(), "date"), + ( + DecimalType(3, 2), + "decimal", + ), + (DoubleType(), "double"), + (FixedType(4), "fixed"), + (FloatType(), "float"), + (IntegerType(), "int"), + (LongType(), "long"), + (StringType(), "string"), + ( + TimestampType(), + "timestamp-micros", + ), + ( + TimestamptzType(), + "timestamp-micros", + ), + (TimeType(), 
"time-micros"), + ( + UUIDType(), + "uuid", + ), + ], ) - assert isinstance( - schema_fields[0].type.type, ArrayType - ), f"Field type {schema_fields[0].type.type} was expected to be {ArrayType}" - arrayType: ArrayType = schema_fields[0].type.type - assert arrayType.nestedType == [ - expected_array_nested_type - ], f"List Field nested type {arrayType.nestedType} was expected to be {expected_array_nested_type}" - - -@pytest.mark.parametrize( - "iceberg_type, expected_map_type", - [ - (IcebergTypes.BinaryType.get(), BytesTypeClass), - (IcebergTypes.BooleanType.get(), BooleanTypeClass), - (IcebergTypes.DateType.get(), DateTypeClass), - ( - IcebergTypes.DecimalType.of(3, 2), - NumberTypeClass, - ), - (IcebergTypes.DoubleType.get(), NumberTypeClass), - (IcebergTypes.FixedType.of_length(4), FixedTypeClass), - (IcebergTypes.FloatType.get(), NumberTypeClass), - (IcebergTypes.IntegerType.get(), NumberTypeClass), - (IcebergTypes.LongType.get(), NumberTypeClass), - (IcebergTypes.StringType.get(), StringTypeClass), - ( - IcebergTypes.TimestampType.with_timezone(), - TimeTypeClass, - ), - ( - IcebergTypes.TimestampType.without_timezone(), - TimeTypeClass, - ), - (IcebergTypes.TimeType.get(), TimeTypeClass), - ( - IcebergTypes.UUIDType.get(), - StringTypeClass, - ), - ], -) -def test_iceberg_map_to_schema_field( - iceberg_type: IcebergTypes.PrimitiveType, expected_map_type: Any -) -> None: - """ - Test converting a map typed Iceberg field to a MapType SchemaField, where the key is the same type as the value. - """ - map_column: NestedField = NestedField.required( - 1, - "mapField", - IcebergTypes.MapType.of_required(11, 12, iceberg_type, iceberg_type), - "documentation", + def test_iceberg_list_to_schema_field( + iceberg_type: PrimitiveType, expected_array_nested_type: Any + ) -> None: + """ + Test converting a list typed Iceberg field to an ArrayType SchemaField, including the list nested type. 
+ """ + for list_column in [ + NestedField( + 1, + "listField", + ListType(2, iceberg_type, True), + True, + "required field, required element documentation", + ), + NestedField( + 1, + "listField", + ListType(2, iceberg_type, False), + True, + "required field, optional element documentation", + ), + NestedField( + 1, + "listField", + ListType(2, iceberg_type, True), + False, + "optional field, required element documentation", + ), + NestedField( + 1, + "listField", + ListType(2, iceberg_type, False), + False, + "optional field, optional element documentation", + ), + ]: + iceberg_source_instance = with_iceberg_source() + schema = Schema(list_column) + schema_fields = iceberg_source_instance._get_schema_fields_for_schema( + schema + ) + assert ( + len(schema_fields) == 1 + ), f"Expected 1 field, but got {len(schema_fields)}" + assert_field( + schema_fields[0], list_column.doc, list_column.optional, ArrayTypeClass + ) + assert isinstance( + schema_fields[0].type.type, ArrayType + ), f"Field type {schema_fields[0].type.type} was expected to be {ArrayType}" + arrayType: ArrayType = schema_fields[0].type.type + assert arrayType.nestedType == [ + expected_array_nested_type + ], f"List Field nested type {arrayType.nestedType} was expected to be {expected_array_nested_type}" + + @pytest.mark.parametrize( + "iceberg_type, expected_map_type", + [ + (BinaryType(), BytesTypeClass), + (BooleanType(), BooleanTypeClass), + (DateType(), DateTypeClass), + ( + DecimalType(3, 2), + NumberTypeClass, + ), + (DoubleType(), NumberTypeClass), + (FixedType(4), FixedTypeClass), + (FloatType(), NumberTypeClass), + (IntegerType(), NumberTypeClass), + (LongType(), NumberTypeClass), + (StringType(), StringTypeClass), + ( + TimestampType(), + TimeTypeClass, + ), + ( + TimestamptzType(), + TimeTypeClass, + ), + (TimeType(), TimeTypeClass), + ( + UUIDType(), + StringTypeClass, + ), + ], ) - iceberg_source_instance = with_iceberg_source() - schema_fields = iceberg_source_instance._get_schema_fields_for_column(map_column) - # Converting an Iceberg Map type will be done by creating an array of struct(key, value) records. - # The first field will be the array. - assert len(schema_fields) == 3, f"Expected 3 fields, but got {len(schema_fields)}" - assert_field( - schema_fields[0], map_column.doc, map_column.is_optional, ArrayTypeClass + def test_iceberg_map_to_schema_field( + iceberg_type: PrimitiveType, expected_map_type: Any + ) -> None: + """ + Test converting a map typed Iceberg field to a MapType SchemaField, where the key is the same type as the value. + """ + for map_column in [ + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, True), + True, + "required field, required value documentation", + ), + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, False), + True, + "required field, optional value documentation", + ), + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, True), + False, + "optional field, required value documentation", + ), + NestedField( + 1, + "mapField", + MapType(11, iceberg_type, 12, iceberg_type, False), + False, + "optional field, optional value documentation", + ), + ]: + iceberg_source_instance = with_iceberg_source() + schema = Schema(map_column) + schema_fields = iceberg_source_instance._get_schema_fields_for_schema( + schema + ) + # Converting an Iceberg Map type will be done by creating an array of struct(key, value) records. + # The first field will be the array. 
+ assert ( + len(schema_fields) == 3 + ), f"Expected 3 fields, but got {len(schema_fields)}" + assert_field( + schema_fields[0], map_column.doc, map_column.optional, ArrayTypeClass + ) + + # The second field will be the key type + assert_field(schema_fields[1], None, False, expected_map_type) + + # The third field will be the value type + assert_field( + schema_fields[2], + None, + not map_column.field_type.value_required, + expected_map_type, + ) + + @pytest.mark.parametrize( + "iceberg_type, expected_schema_field_type", + [ + (BinaryType(), BytesTypeClass), + (BooleanType(), BooleanTypeClass), + (DateType(), DateTypeClass), + ( + DecimalType(3, 2), + NumberTypeClass, + ), + (DoubleType(), NumberTypeClass), + (FixedType(4), FixedTypeClass), + (FloatType(), NumberTypeClass), + (IntegerType(), NumberTypeClass), + (LongType(), NumberTypeClass), + (StringType(), StringTypeClass), + ( + TimestampType(), + TimeTypeClass, + ), + ( + TimestamptzType(), + TimeTypeClass, + ), + (TimeType(), TimeTypeClass), + ( + UUIDType(), + StringTypeClass, + ), + ], ) + def test_iceberg_struct_to_schema_field( + iceberg_type: PrimitiveType, expected_schema_field_type: Any + ) -> None: + """ + Test converting a struct typed Iceberg field to a RecordType SchemaField. + """ + field1 = NestedField(11, "field1", iceberg_type, True, "field documentation") + struct_column = NestedField( + 1, "structField", StructType(field1), True, "struct documentation" + ) + iceberg_source_instance = with_iceberg_source() + schema = Schema(struct_column) + schema_fields = iceberg_source_instance._get_schema_fields_for_schema(schema) + assert ( + len(schema_fields) == 2 + ), f"Expected 2 fields, but got {len(schema_fields)}" + assert_field( + schema_fields[0], struct_column.doc, struct_column.optional, RecordTypeClass + ) + assert_field( + schema_fields[1], field1.doc, field1.optional, expected_schema_field_type + ) - # The second field will be the key type - assert_field(schema_fields[1], None, False, expected_map_type) - - # The third field will be the value type - assert_field(schema_fields[2], None, True, expected_map_type) - - -@pytest.mark.parametrize( - "iceberg_type, expected_schema_field_type", - [ - (IcebergTypes.BinaryType.get(), BytesTypeClass), - (IcebergTypes.BooleanType.get(), BooleanTypeClass), - (IcebergTypes.DateType.get(), DateTypeClass), - ( - IcebergTypes.DecimalType.of(3, 2), - NumberTypeClass, - ), - (IcebergTypes.DoubleType.get(), NumberTypeClass), - (IcebergTypes.FixedType.of_length(4), FixedTypeClass), - (IcebergTypes.FloatType.get(), NumberTypeClass), - (IcebergTypes.IntegerType.get(), NumberTypeClass), - (IcebergTypes.LongType.get(), NumberTypeClass), - (IcebergTypes.StringType.get(), StringTypeClass), - ( - IcebergTypes.TimestampType.with_timezone(), - TimeTypeClass, - ), - ( - IcebergTypes.TimestampType.without_timezone(), - TimeTypeClass, - ), - (IcebergTypes.TimeType.get(), TimeTypeClass), - ( - IcebergTypes.UUIDType.get(), - StringTypeClass, - ), - ], -) -def test_iceberg_struct_to_schema_field( - iceberg_type: IcebergTypes.PrimitiveType, expected_schema_field_type: Any -) -> None: - """ - Test converting a struct typed Iceberg field to a RecordType SchemaField. 
- """ - field1: NestedField = NestedField.required( - 11, "field1", iceberg_type, "field documentation" - ) - struct_column: NestedField = NestedField.required( - 1, "structField", IcebergTypes.StructType.of([field1]), "struct documentation" + @pytest.mark.parametrize( + "value_type, value, expected_value", + [ + (BinaryType(), bytes([1, 2, 3, 4, 5]), "b'\\x01\\x02\\x03\\x04\\x05'"), + (BooleanType(), True, "True"), + (DateType(), 19543, "2023-07-05"), + (DecimalType(3, 2), Decimal((0, (3, 1, 4), -2)), "3.14"), + (DoubleType(), 3.4, "3.4"), + (FixedType(4), bytes([1, 2, 3, 4]), "b'\\x01\\x02\\x03\\x04'"), + (FloatType(), 3.4, "3.4"), + (IntegerType(), 3, "3"), + (LongType(), 4294967295000, "4294967295000"), + (StringType(), "a string", "a string"), + ( + TimestampType(), + 1688559488157000, + "2023-07-05T12:18:08.157000", + ), + ( + TimestamptzType(), + 1688559488157000, + "2023-07-05T12:18:08.157000+00:00", + ), + (TimeType(), 40400000000, "11:13:20"), + ( + UUIDType(), + uuid.UUID("00010203-0405-0607-0809-0a0b0c0d0e0f"), + "00010203-0405-0607-0809-0a0b0c0d0e0f", + ), + ], ) - iceberg_source_instance = with_iceberg_source() - schema_fields = iceberg_source_instance._get_schema_fields_for_column(struct_column) - assert len(schema_fields) == 2, f"Expected 2 fields, but got {len(schema_fields)}" - assert_field( - schema_fields[0], struct_column.doc, struct_column.is_optional, RecordTypeClass - ) - assert_field( - schema_fields[1], field1.doc, field1.is_optional, expected_schema_field_type - ) - - -def test_avro_decimal_bytes_nullable(): - """ - The following test exposes a problem with decimal (bytes) not preserving extra attributes like _nullable. Decimal (fixed) and Boolean for example do. - NOTE: This bug was by-passed by mapping the Decimal type to fixed instead of bytes. - """ - import avro.schema + def test_iceberg_profiler_value_render( + value_type: IcebergType, value: Any, expected_value: Optional[str] + ) -> None: + iceberg_profiler_instance = with_iceberg_profiler() + assert ( + iceberg_profiler_instance._render_value("a.dataset", value_type, value) + == expected_value + ) - decimal_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "bytes", "precision": 3, "scale": 2, "logicalType": "decimal", "native_data_type": "decimal(3, 2)", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" - decimal_avro_schema = avro.schema.parse(decimal_avro_schema_string) - print("\nDecimal (bytes)") - print( - f"Original avro schema string: {decimal_avro_schema_string}" - ) - print(f"After avro parsing, _nullable attribute is missing: {decimal_avro_schema}") + def test_avro_decimal_bytes_nullable() -> None: + """ + The following test exposes a problem with decimal (bytes) not preserving extra attributes like _nullable. Decimal (fixed) and Boolean for example do. + NOTE: This bug was by-passed by mapping the Decimal type to fixed instead of bytes. 
+ """ + import avro.schema + + decimal_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "bytes", "precision": 3, "scale": 2, "logicalType": "decimal", "native_data_type": "decimal(3, 2)", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" + decimal_avro_schema = avro.schema.parse(decimal_avro_schema_string) + print("\nDecimal (bytes)") + print( + f"Original avro schema string: {decimal_avro_schema_string}" + ) + print( + f"After avro parsing, _nullable attribute is missing: {decimal_avro_schema}" + ) - decimal_fixed_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "fixed", "logicalType": "decimal", "precision": 3, "scale": 2, "native_data_type": "decimal(3, 2)", "_nullable": false, "name": "bogusName", "size": 16}, "name": "required_field", "doc": "required field documentation"}]}""" - decimal_fixed_avro_schema = avro.schema.parse(decimal_fixed_avro_schema_string) - print("\nDecimal (fixed)") - print( - f"Original avro schema string: {decimal_fixed_avro_schema_string}" - ) - print( - f"After avro parsing, _nullable attribute is preserved: {decimal_fixed_avro_schema}" - ) + decimal_fixed_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "fixed", "logicalType": "decimal", "precision": 3, "scale": 2, "native_data_type": "decimal(3, 2)", "_nullable": false, "name": "bogusName", "size": 16}, "name": "required_field", "doc": "required field documentation"}]}""" + decimal_fixed_avro_schema = avro.schema.parse(decimal_fixed_avro_schema_string) + print("\nDecimal (fixed)") + print( + f"Original avro schema string: {decimal_fixed_avro_schema_string}" + ) + print( + f"After avro parsing, _nullable attribute is preserved: {decimal_fixed_avro_schema}" + ) - boolean_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "boolean", "native_data_type": "boolean", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" - boolean_avro_schema = avro.schema.parse(boolean_avro_schema_string) - print("\nBoolean") - print( - f"Original avro schema string: {boolean_avro_schema_string}" - ) - print( - f"After avro parsing, _nullable attribute is preserved: {boolean_avro_schema}" - ) + boolean_avro_schema_string = """{"type": "record", "name": "__struct_", "fields": [{"type": {"type": "boolean", "native_data_type": "boolean", "_nullable": false}, "name": "required_field", "doc": "required field documentation"}]}""" + boolean_avro_schema = avro.schema.parse(boolean_avro_schema_string) + print("\nBoolean") + print( + f"Original avro schema string: {boolean_avro_schema_string}" + ) + print( + f"After avro parsing, _nullable attribute is preserved: {boolean_avro_schema}" + ) From d78a7905d754cf1d4343ea266ef93f39fcc37df3 Mon Sep 17 00:00:00 2001 From: Hendrik Richert Date: Thu, 31 Aug 2023 20:28:03 +0200 Subject: [PATCH 30/41] Allow frontend to use http proxy (#8691) Co-authored-by: Hendrik Richert Co-authored-by: RyanHolstien --- docker/datahub-frontend/start.sh | 16 +++++ docs-website/sidebars.js | 1 + .../guides/sso/configure-oidc-behind-proxy.md | 64 +++++++++++++++++++ 3 files changed, 81 insertions(+) create mode 100644 docs/authentication/guides/sso/configure-oidc-behind-proxy.md diff --git a/docker/datahub-frontend/start.sh b/docker/datahub-frontend/start.sh index a1548670309b5..9dc1514144bb1 100755 --- a/docker/datahub-frontend/start.sh +++ 
b/docker/datahub-frontend/start.sh @@ -26,6 +26,21 @@ if [[ ! -z ${SSL_TRUSTSTORE_PASSWORD:-} ]]; then TRUSTSTORE_PASSWORD="-Djavax.net.ssl.trustStorePassword=$SSL_TRUSTSTORE_PASSWORD" fi +HTTP_PROXY="" +if [[ ! -z ${HTTP_PROXY_HOST:-} ]] && [[ ! -z ${HTTP_PROXY_PORT:-} ]]; then + HTTP_PROXY="-Dhttp.proxyHost=$HTTP_PROXY_HOST -Dhttp.proxyPort=$HTTP_PROXY_PORT" +fi + +HTTPS_PROXY="" +if [[ ! -z ${HTTPS_PROXY_HOST:-} ]] && [[ ! -z ${HTTPS_PROXY_PORT:-} ]]; then + HTTPS_PROXY="-Dhttps.proxyHost=$HTTPS_PROXY_HOST -Dhttps.proxyPort=$HTTPS_PROXY_PORT" +fi + +NO_PROXY="" +if [[ ! -z ${HTTP_NON_PROXY_HOSTS:-} ]]; then + NO_PROXY="-Dhttp.nonProxyHosts='$HTTP_NON_PROXY_HOSTS'" +fi + # make sure there is no whitespace at the beginning and the end of # this string export JAVA_OPTS="-Xms512m \ @@ -37,6 +52,7 @@ export JAVA_OPTS="-Xms512m \ -Dlogback.debug=false \ ${PROMETHEUS_AGENT:-} ${OTEL_AGENT:-} \ ${TRUSTSTORE_FILE:-} ${TRUSTSTORE_TYPE:-} ${TRUSTSTORE_PASSWORD:-} \ + ${HTTP_PROXY:-} ${HTTPS_PROXY:-} ${NO_PROXY:-} \ -Dpidfile.path=/dev/null" exec ./datahub-frontend/bin/datahub-frontend diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 64433a2615f34..fcf82b786a1b9 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -168,6 +168,7 @@ module.exports = { "docs/authentication/guides/sso/configure-oidc-react-google", "docs/authentication/guides/sso/configure-oidc-react-okta", "docs/authentication/guides/sso/configure-oidc-react-azure", + "docs/authentication/guides/sso/configure-oidc-behind-proxy", ], }, ], diff --git a/docs/authentication/guides/sso/configure-oidc-behind-proxy.md b/docs/authentication/guides/sso/configure-oidc-behind-proxy.md new file mode 100644 index 0000000000000..c998816e04735 --- /dev/null +++ b/docs/authentication/guides/sso/configure-oidc-behind-proxy.md @@ -0,0 +1,64 @@ +# Configuring Frontend to use a Proxy when communicating with SSO Provider +*Authored on 22/08/2023* + +The `datahub-frontend-react` server can be configured to use an http proxy when retrieving the openid-configuration. +This can be needed if your infrastructure is locked down and disallows connectivity by default, using proxies for fine-grained egress control. + +## Configure http proxy and non proxy hosts + +To do this, you will need to pass a set of environment variables to the datahub-frontend-react container (e.g. in the `docker-compose.yml` file or your kubernetes manifest). + +``` +HTTP_PROXY_HOST=host of your http proxy +HTTP_PROXY_PORT=port of your http proxy +HTTPS_PROXY_HOST=host of your http(s) proxy used for https connections (often the same as the http proxy) +HTTPS_PROXY_PORT=port of your http(s) proxy used for https connections (often the same as the http proxy) +HTTP_NON_PROXY_HOSTS=localhost|datahub-gms (or any other hosts that you would like to bypass the proxy for, delimited by pipe) +``` + +## Optional: provide custom truststore +If your upstream proxy performs SSL termination to inspect traffic, this will result in different (self-signed) certificates for HTTPS connections. +The default truststore used in the `datahub-frontend-react` docker image will not trust these kinds of connections. +To address this, you can copy or mount your own truststore (provided by the proxy or network administrators) into the docker container. 
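
Before working through the truststore options below, it can help to sanity-check plain proxy reachability first. The following is a minimal sketch, not part of this patch; the proxy address and issuer URL in it are assumed placeholders:

```python
# Illustrative sanity check: fetch the openid-configuration through the same
# HTTP proxy that datahub-frontend will use. The proxy and issuer URLs are
# assumptions; substitute your own values.
import json
import urllib.request

PROXY = "http://egress-proxy.internal:3128"  # assumed proxy host:port
DISCOVERY_URL = "https://sso.example.com/.well-known/openid-configuration"  # assumed issuer

opener = urllib.request.build_opener(
    urllib.request.ProxyHandler({"http": PROXY, "https": PROXY})
)
with opener.open(DISCOVERY_URL, timeout=10) as resp:
    print(json.load(resp)["token_endpoint"])
```

If this check fails from within the container's network, the frontend's JVM proxy settings will fail in the same way.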
+
+Depending on your setup, you have a few options to achieve this:
+
+### Make truststore available in the frontend
+
+#### Option a) Build frontend docker image with your own truststore included
+
+To build a custom image for your frontend, with the certificates built-in, you can use the official frontend image as a base, then copy in your required files.
+
+Example Dockerfile:
+
+```dockerfile
+FROM linkedin/datahub-frontend-react:
+COPY /truststore-directory /certificates
+```
+
+Building this Dockerfile will result in your own custom docker image on your local machine.
+You will then be able to tag it, publish it to your own registry, etc.
+
+#### Option b) Mount truststore from your host machine using a docker volume
+
+Adapt your docker-compose.yml to include a new volume mount in the `datahub-frontend-react` container.
+
+```docker
+  datahub-frontend-react:
+    # ...
+    volumes:
+      # ...
+      - /truststore-directory:/certificates
+```
+
+### Reference new truststore
+
+Add the following environment values to the `datahub-frontend-react` container:
+
+```
+SSL_TRUSTSTORE_FILE=path/to/truststore.jks (e.g. /certificates)
+SSL_TRUSTSTORE_TYPE=jks
+SSL_TRUSTSTORE_PASSWORD=MyTruststorePassword
+```
+
+Once these steps are done, your frontend container will use the new truststore when validating SSL/HTTPS connections.

From ee06a65c29cc80d54d685dab122c04221d35ce75 Mon Sep 17 00:00:00 2001
From: Zachary McNellis
Date: Thu, 31 Aug 2023 12:08:09 -0700
Subject: [PATCH 31/41] docs(observe): Dataset Profile volume assertion guide (#8764)

---
 docs/managed-datahub/observe/volume-assertions.md | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/docs/managed-datahub/observe/volume-assertions.md b/docs/managed-datahub/observe/volume-assertions.md
index 8c7676ca9afbb..5f5aff33a5ce2 100644
--- a/docs/managed-datahub/observe/volume-assertions.md
+++ b/docs/managed-datahub/observe/volume-assertions.md
@@ -123,6 +123,10 @@ source types vary by the platform, but generally fall into these categories:

- **Query**: A `COUNT(*)` query is used to retrieve the latest row count for a table, with optional SQL filters applied (depending on platform). This can be less efficient to check depending on the size of the table. This approach is more portable, as it does not involve system warehouse tables, and it is easily portable across Data Warehouse and Data Lake providers.
+
+- **DataHub Dataset Profile**: The DataHub Dataset Profile aspect is used to retrieve the latest row count information for a table.
+  Using this option avoids contacting your data platform, and instead uses the DataHub Dataset Profile metadata to evaluate Volume Assertions.
+  Note that if you have not configured an ingestion source through DataHub, then this may be the only option available.

Volume Assertions also have an off switch: they can be started or stopped at any time with the click of a button.

@@ -166,7 +170,7 @@ Once these are in place, you're ready to create your Volume Assertions!

7. (Optional) Click **Advanced** to customize the volume **source**. This is the mechanism that will be used to obtain the table - row count metric. Each Data Platform supports different options including Information Schema and Query. + row count metric. Each Data Platform supports different options including Information Schema, Query, and DataHub Dataset Profile.

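To make the **Query** source option concrete, here is a rough sketch of the check it boils down to, assuming a DB-API compatible connection; the table name and filter are hypothetical:

```python
from typing import Optional

def latest_row_count(conn, table: str, where: Optional[str] = None) -> int:
    """Illustrative COUNT(*) volume check, roughly what the Query source issues."""
    sql = f"SELECT COUNT(*) FROM {table}"
    if where:
        sql += f" WHERE {where}"  # optional SQL filter, where the platform supports it
    cursor = conn.cursor()
    cursor.execute(sql)
    return cursor.fetchone()[0]
```

The Information Schema source avoids this table scan entirely by reading the platform's system metadata tables instead.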
@@ -174,6 +178,7 @@ Once these are in place, you're ready to create your Volume Assertions! - **Information Schema**: Check the Data Platform system metadata tables to determine the table row count. - **Query**: Issue a `COUNT(*)` query to the table to determine the row count. +- **DataHub Dataset Profile**: Use the DataHub Dataset Profile metadata to determine the row count. 8. Click **Next** 9. Configure actions that should be taken when the Volume Assertion passes or fails From 5458c6b014e6781472f4b79f60ae384e08c2c334 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Fri, 1 Sep 2023 14:23:46 +0900 Subject: [PATCH 32/41] docs:fix broken img links under managed-datahub (#8769) --- docs/managed-datahub/chrome-extension.md | 12 ++++++------ .../datahub-api/graphql-api/getting-started.md | 2 +- .../datahub-api/graphql-api/incidents-api-beta.md | 2 +- .../integrations/oidc-sso-integration.md | 2 +- .../metadata-ingestion-with-acryl/ingestion.md | 4 ++-- .../setting-up-remote-ingestion-executor-on-aws.md | 14 +++++++------- 6 files changed, 18 insertions(+), 18 deletions(-) diff --git a/docs/managed-datahub/chrome-extension.md b/docs/managed-datahub/chrome-extension.md index c6840f4e8e221..0aa0860d03b67 100644 --- a/docs/managed-datahub/chrome-extension.md +++ b/docs/managed-datahub/chrome-extension.md @@ -12,7 +12,7 @@ In order to use the Acryl DataHub Chrome extension, you need to download it onto


@@ -26,7 +26,7 @@ Once you have your extension installed, you'll need to configure it to work with


@@ -34,7 +34,7 @@ Once you have your extension installed, you'll need to configure it to work with


@@ -48,7 +48,7 @@ Some organizations have custom SaaS domains for Looker and some Acryl DataHub de


@@ -56,7 +56,7 @@ Some organizations have custom SaaS domains for Looker and some Acryl DataHub de


@@ -74,7 +74,7 @@ Once you have everything configured on your extension, it's time to use it!


diff --git a/docs/managed-datahub/datahub-api/graphql-api/getting-started.md b/docs/managed-datahub/datahub-api/graphql-api/getting-started.md index 57d46f05c4e0c..736bf6fea6811 100644 --- a/docs/managed-datahub/datahub-api/graphql-api/getting-started.md +++ b/docs/managed-datahub/datahub-api/graphql-api/getting-started.md @@ -12,7 +12,7 @@ For a full reference to the Queries & Mutations available for consumption, check


diff --git a/docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md b/docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md index bfd8e8f2dae1b..16d83d2f57575 100644 --- a/docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md +++ b/docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta.md @@ -406,7 +406,7 @@ These notifications are also able to tag the immediate asset's owners, along wit


diff --git a/docs/managed-datahub/integrations/oidc-sso-integration.md b/docs/managed-datahub/integrations/oidc-sso-integration.md index c0f5069d849fa..ec4ca311a0de5 100644 --- a/docs/managed-datahub/integrations/oidc-sso-integration.md +++ b/docs/managed-datahub/integrations/oidc-sso-integration.md @@ -44,6 +44,6 @@ To enable the OIDC integration, start by navigating to **Settings > Platform > S


diff --git a/docs/managed-datahub/metadata-ingestion-with-acryl/ingestion.md b/docs/managed-datahub/metadata-ingestion-with-acryl/ingestion.md index e225fd8b014c8..0444d15b3627c 100644 --- a/docs/managed-datahub/metadata-ingestion-with-acryl/ingestion.md +++ b/docs/managed-datahub/metadata-ingestion-with-acryl/ingestion.md @@ -58,13 +58,13 @@ In Acryl DataHub deployments, you _must_ use a sink of type `datahub-rest`, whic The token can be retrieved by logging in as admin. You can go to Settings page and generate a Personal Access Token with your desired expiration date.
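As an illustration of how such a token is consumed from code (not part of this patch; the server URL and token are placeholders), the Python emitter used by the `datahub-rest` sink accepts it directly:

```python
# Illustrative only: emit to an Acryl DataHub instance with a Personal Access Token.
# The gms_server URL and token below are placeholders.
from datahub.emitter.rest_emitter import DatahubRestEmitter

emitter = DatahubRestEmitter(
    gms_server="https://your-account.acryl.io/gms",
    token="<your-personal-access-token>",
)
emitter.test_connection()  # raises if the URL or token is wrong
```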



diff --git a/docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws.md b/docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws.md index 6c6cce51ea098..b8fb0ea9e80f1 100644 --- a/docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws.md +++ b/docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws.md @@ -19,7 +19,7 @@ For example, if an ingestion source is not publicly accessible via the internet,


@@ -27,7 +27,7 @@ To accommodate these cases, Acryl supports configuring a remote ingestion execut


@@ -50,13 +50,13 @@ To accommodate these cases, Acryl supports configuring a remote ingestion execut



@@ -67,7 +67,7 @@ To accommodate these cases, Acryl supports configuring a remote ingestion execut


3. In the 'Finish Up' step, click '**Advanced**'.
@@ -78,7 +78,7 @@ To accommodate these cases, Acryl supports configuring a remote ingestion execut


## Updating a Remote Ingestion Executor
@@ -92,7 +92,7 @@ In order to update the executor, i.e., to deploy a new container version, you'll n
 6. Upload a copy of the Acryl Remote Executor [CloudFormation Template](https://raw.githubusercontent.com/acryldata/datahub-cloudformation/master/Ingestion/templates/python.ecs.template.yaml)


7. Click **Next** From 31fc687786cbee3caa4643a7413cc4aeaaa749da Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Fri, 1 Sep 2023 18:14:28 +0900 Subject: [PATCH 33/41] fix:small typo on graphql tutorial (#8741) --- docs/api/tutorials/lineage.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/api/tutorials/lineage.md b/docs/api/tutorials/lineage.md index ce23a4d274e8e..dc43cb178f949 100644 --- a/docs/api/tutorials/lineage.md +++ b/docs/api/tutorials/lineage.md @@ -145,8 +145,8 @@ You can now see the column-level lineage between datasets. Note that you have to -```json -mutation searchAcrossLineage { +```graphql +query searchAcrossLineage { searchAcrossLineage( input: { query: "*" From 7dd6e09ac5afce8bc354b68fdb9739d04f22a6a2 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 1 Sep 2023 09:06:01 -0500 Subject: [PATCH 34/41] refactor(build): upgrade to gradle 7 & guava update (#8745) --- .github/workflows/spark-smoke-test.yml | 2 + build.gradle | 16 +-- buildSrc/build.gradle | 10 +- .../GenerateJsonSchemaTask.java | 6 +- datahub-frontend/build.gradle | 2 +- datahub-frontend/play.gradle | 14 +-- datahub-graphql-core/build.gradle | 23 ++--- datahub-upgrade/build.gradle | 39 ++++---- entity-registry/build.gradle | 28 +++--- .../custom-test-model/build.gradle | 6 +- gradle/wrapper/gradle-wrapper.properties | 2 +- ingestion-scheduler/build.gradle | 13 +-- li-utils/build.gradle | 16 +-- metadata-auth/auth-api/build.gradle | 12 +-- metadata-dao-impl/kafka-producer/build.gradle | 17 ++-- metadata-events/mxe-avro-1.7/build.gradle | 6 +- metadata-events/mxe-registration/build.gradle | 9 +- metadata-events/mxe-schemas/build.gradle | 4 + .../mxe-utils-avro-1.7/build.gradle | 11 ++- .../java/datahub-client/build.gradle | 19 ++-- .../java/datahub-protobuf/build.gradle | 1 + .../java/examples/build.gradle | 2 +- .../java/spark-lineage/build.gradle | 2 +- metadata-io/build.gradle | 98 ++++++++++--------- metadata-jobs/mae-consumer-job/build.gradle | 9 +- metadata-jobs/mae-consumer/build.gradle | 39 ++++---- metadata-jobs/mce-consumer-job/build.gradle | 13 ++- metadata-jobs/mce-consumer/build.gradle | 28 +++--- metadata-jobs/pe-consumer/build.gradle | 20 ++-- metadata-models-custom/build.gradle | 10 +- metadata-models-validator/build.gradle | 12 +-- metadata-models/build.gradle | 8 +- metadata-service/auth-config/build.gradle | 6 +- metadata-service/auth-filter/build.gradle | 14 +-- metadata-service/auth-impl/build.gradle | 16 +-- .../auth-servlet-impl/build.gradle | 18 ++-- metadata-service/configuration/build.gradle | 2 +- metadata-service/factories/build.gradle | 68 +++++++------ .../graphql-servlet-impl/build.gradle | 21 ++-- metadata-service/health-servlet/build.gradle | 19 ++-- .../controller/HealthCheckController.java | 1 + metadata-service/openapi-servlet/build.gradle | 42 ++++---- .../RelationshipsController.java | 8 +- .../src/test/sample-test-plugins/build.gradle | 1 + metadata-service/restli-client/build.gradle | 17 ++-- .../restli-servlet-impl/build.gradle | 43 ++++---- .../schema-registry-api/build.gradle | 30 +++--- .../schema-registry-servlet/build.gradle | 41 ++++---- metadata-service/services/build.gradle | 35 +++---- .../linkedin/metadata/service/TagService.java | 2 +- metadata-service/servlet/build.gradle | 19 ++-- metadata-service/war/build.gradle | 46 ++++----- metadata-utils/build.gradle | 39 ++++---- test-models/build.gradle | 6 +- 54 files changed, 531 insertions(+), 460 deletions(-) 
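Circling back to the one-line GraphQL fix above: `searchAcrossLineage` is a read operation, so it belongs under `query` rather than `mutation`. A minimal sketch of issuing it over HTTP follows; the endpoint, token, and exact input fields shown are assumptions for illustration:

```python
# Illustrative call of searchAcrossLineage as a query; the endpoint, token,
# and input fields here are assumptions, not taken from this patch.
import requests

GRAPHQL_QUERY = """
query searchAcrossLineage {
  searchAcrossLineage(
    input: {urn: "<dataset urn>", query: "*", direction: DOWNSTREAM, start: 0, count: 10}
  ) {
    searchResults {
      entity { urn }
    }
  }
}
"""

resp = requests.post(
    "http://localhost:8080/api/graphql",
    headers={"Authorization": "Bearer <token>"},
    json={"query": GRAPHQL_QUERY},
)
resp.raise_for_status()
print(resp.json())
```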
diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 5f501780873f6..ac411d812deea 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -40,6 +40,8 @@ jobs: python-version: "3.7" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh + - name: Remove images + run: docker image prune -a -f || true - name: Smoke test run: | ./gradlew :metadata-integration:java:spark-lineage:integrationTest \ diff --git a/build.gradle b/build.gradle index 3958f502b3b32..f73fe42d45956 100644 --- a/build.gradle +++ b/build.gradle @@ -1,7 +1,7 @@ buildscript { ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.22.16' + ext.pegasusVersion = '29.45.0' ext.mavenVersion = '3.6.3' ext.springVersion = '5.3.29' ext.springBootVersion = '2.7.14' @@ -29,16 +29,16 @@ buildscript { classpath 'io.acryl.gradle.plugin:gradle-avro-plugin:0.8.1' classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0" - classpath "com.palantir.gradle.gitversion:gradle-git-version:0.12.3" - classpath "org.gradle.playframework:gradle-playframework:0.12" - classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.18.1" + classpath "com.palantir.gradle.gitversion:gradle-git-version:3.0.0" + classpath "org.gradle.playframework:gradle-playframework:0.14" + classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.1" } } plugins { id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' id 'com.github.johnrengelman.shadow' version '6.1.0' - id "com.palantir.docker" version "0.35.0" + id 'com.palantir.docker' version '0.35.0' // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" } @@ -95,7 +95,7 @@ project.ext.externalDependency = [ 'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:19.1', 'gson': 'com.google.code.gson:gson:2.8.9', 'guice': 'com.google.inject:guice:4.2.3', - 'guava': 'com.google.guava:guava:27.0.1-jre', + 'guava': 'com.google.guava:guava:32.1.2-jre', 'h2': 'com.h2database:h2:2.1.214', 'hadoopCommon':'org.apache.hadoop:hadoop-common:2.7.2', 'hadoopMapreduceClient':'org.apache.hadoop:hadoop-mapreduce-client-core:2.7.2', @@ -239,7 +239,7 @@ configure(subprojects.findAll {! 
it.name.startsWith('spark-lineage')}) { subprojects { - apply plugin: 'maven' + apply plugin: 'maven-publish' apply plugin: 'com.gorylenko.gradle-git-properties' gitProperties { @@ -253,7 +253,7 @@ subprojects { plugins.withType(JavaPlugin) { dependencies { - testCompile externalDependency.testng + testImplementation externalDependency.testng constraints { implementation('io.netty:netty-all:4.1.86.Final') implementation('org.apache.commons:commons-compress:1.21') diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 981a0ab221217..f88d2bdb966ce 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -5,12 +5,12 @@ buildscript { } dependencies { - compile('io.acryl:json-schema-avro:0.1.5') { + implementation('io.acryl:json-schema-avro:0.1.5') { exclude group: 'com.fasterxml.jackson.core', module: 'jackson-databind' exclude group: 'com.google.guava', module: 'guava' } - compile 'com.google.guava:guava:27.0.1-jre' - compile 'com.fasterxml.jackson.core:jackson-databind:2.13.5' - compile 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5' - compile 'commons-io:commons-io:2.11.0' + implementation 'com.google.guava:guava:32.1.2-jre' + implementation 'com.fasterxml.jackson.core:jackson-databind:2.13.5' + implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5' + implementation 'commons-io:commons-io:2.11.0' } \ No newline at end of file diff --git a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java index a5a843d91b1eb..796d622860c15 100644 --- a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java +++ b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java @@ -21,10 +21,7 @@ import java.util.List; import java.util.stream.Collectors; import org.gradle.api.DefaultTask; -import org.gradle.api.tasks.CacheableTask; -import org.gradle.api.tasks.InputDirectory; -import org.gradle.api.tasks.OutputDirectory; -import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.*; import static com.github.fge.processing.ProcessingUtil.*; import static org.apache.commons.io.FilenameUtils.*; @@ -46,6 +43,7 @@ public void setInputDirectory(String inputDirectory) { } @InputDirectory + @PathSensitive(PathSensitivity.NAME_ONLY) public String getInputDirectory() { return inputDirectory; } diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index fda33e4a9a3c6..cf1f8ca3cdd84 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -1,7 +1,7 @@ plugins { id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" - id 'com.palantir.docker' id 'scala' + id 'com.palantir.docker' } apply from: "../gradle/versioning/versioning.gradle" diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index e7121d277926d..e40f8e3eeb96d 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -4,7 +4,7 @@ apply plugin: "org.gradle.playframework" project.ext.httpPort = 9001 project.ext.playBinaryBaseName = "datahub-frontend" -tasks.withType(PlayRun) { +runPlay { httpPort = project.ext.httpPort } @@ -33,8 +33,8 @@ dependencies { } } - compile project(":metadata-service:restli-client") - compile project(":metadata-service:auth-config") + implementation project(":metadata-service:restli-client") + implementation project(":metadata-service:auth-config") implementation externalDependency.jettyJaas implementation externalDependency.graphqlJava @@ -70,15 +70,15 @@ dependencies { 
testImplementation 'no.nav.security:mock-oauth2-server:0.3.1' testImplementation 'org.junit-pioneer:junit-pioneer:1.9.1' testImplementation externalDependency.junitJupiterApi - testRuntime externalDependency.junitJupiterEngine + testRuntimeOnly externalDependency.junitJupiterEngine implementation externalDependency.slf4jApi compileOnly externalDependency.lombok - runtime externalDependency.guice - runtime (externalDependency.playDocs) { + runtimeOnly externalDependency.guice + runtimeOnly (externalDependency.playDocs) { exclude group: 'com.typesafe.akka', module: 'akka-http-core_2.12' } - runtime externalDependency.playGuice + runtimeOnly externalDependency.playGuice implementation externalDependency.log4j2Api implementation externalDependency.logbackClassic diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index 8fd45033373dc..89ba8f17b6aeb 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -4,25 +4,26 @@ plugins { apply plugin: 'java' dependencies { - compile project(':metadata-service:restli-client') - compile project(':metadata-service:auth-impl') - compile project(':metadata-service:auth-config') - compile project(':metadata-service:configuration') - compile project(':metadata-service:services') - compile project(':metadata-io') - compile project(':metadata-utils') + implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:auth-impl') + implementation project(':metadata-service:auth-config') + implementation project(':metadata-service:configuration') + implementation project(':metadata-service:services') + implementation project(':metadata-io') + implementation project(':metadata-utils') implementation externalDependency.graphqlJava implementation externalDependency.graphqlJavaScalars - compile externalDependency.antlr4Runtime - compile externalDependency.antlr4 - compile externalDependency.guava + implementation externalDependency.antlr4Runtime + implementation externalDependency.antlr4 + implementation externalDependency.guava + implementation externalDependency.opentelemetryAnnotations implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testCompile externalDependency.mockito + testImplementation externalDependency.mockito } graphqlCodegen { diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 78d9f6a09948d..625cab6b354c5 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -12,14 +12,15 @@ ext { } dependencies { - compile project(':metadata-io') - compile project(':metadata-service:factories') - compile project(':metadata-service:restli-client') - compile project(':metadata-service:configuration') + implementation project(':metadata-io') + implementation project(':metadata-service:factories') + implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:configuration') + implementation project(':metadata-dao-impl:kafka-producer') implementation externalDependency.charle - compile externalDependency.javaxInject - compile(externalDependency.hadoopClient) { + implementation externalDependency.javaxInject + implementation(externalDependency.hadoopClient) { exclude group: 'net.minidev', module: 'json-smart' exclude group: 'com.nimbusds', module: 'nimbus-jose-jwt' exclude group: "org.apache.htrace", module: "htrace-core4" @@ -52,18 +53,18 @@ dependencies { implementation externalDependency.slf4jApi 
compileOnly externalDependency.lombok - compile externalDependency.picocli - compile externalDependency.parquet + implementation externalDependency.picocli + implementation externalDependency.parquet implementation externalDependency.protobuf - compile externalDependency.springBeans - compile externalDependency.springBootAutoconfigure - compile externalDependency.springCore - compile externalDependency.springKafka + implementation externalDependency.springBeans + implementation externalDependency.springBootAutoconfigure + implementation externalDependency.springCore + implementation externalDependency.springKafka - runtime externalDependency.logbackClassic - runtime externalDependency.mariadbConnector - runtime externalDependency.mysqlConnector - runtime externalDependency.postgresql + runtimeOnly externalDependency.logbackClassic + runtimeOnly externalDependency.mariadbConnector + runtimeOnly externalDependency.mysqlConnector + runtimeOnly externalDependency.postgresql implementation externalDependency.awsMskIamAuth @@ -71,9 +72,9 @@ dependencies { annotationProcessor externalDependency.picocli testImplementation externalDependency.springBootTest - testCompile externalDependency.mockito - testCompile externalDependency.testng - testRuntime externalDependency.logbackClassic + testImplementation externalDependency.mockito + testImplementation externalDependency.testng + testRuntimeOnly externalDependency.logbackClassic } bootJar { diff --git a/entity-registry/build.gradle b/entity-registry/build.gradle index af742d240d1e6..3da0bf5bb4fb8 100644 --- a/entity-registry/build.gradle +++ b/entity-registry/build.gradle @@ -1,16 +1,17 @@ apply plugin: 'pegasus' +apply plugin: 'java-library' dependencies { - compile spec.product.pegasus.data - compile spec.product.pegasus.generator - compile project(path: ':metadata-models') + implementation spec.product.pegasus.data + implementation spec.product.pegasus.generator + api project(path: ':metadata-models') implementation externalDependency.slf4jApi compileOnly externalDependency.lombok - compile externalDependency.guava - compile externalDependency.jacksonDataBind - compile externalDependency.jacksonDataFormatYaml - compile externalDependency.reflections - compile externalDependency.jsonPatch + implementation externalDependency.guava + implementation externalDependency.jacksonDataBind + implementation externalDependency.jacksonDataFormatYaml + implementation externalDependency.reflections + implementation externalDependency.jsonPatch constraints { implementation(externalDependency.snakeYaml) { because("previous versions are vulnerable to CVE-2022-25857") @@ -19,12 +20,13 @@ dependencies { dataModel project(':li-utils') annotationProcessor externalDependency.lombok - compile externalDependency.mavenArtifact + api externalDependency.mavenArtifact - testCompile project(':test-models') - testCompile externalDependency.testng - testCompile externalDependency.mockito - testCompile externalDependency.mockitoInline + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') + testImplementation externalDependency.testng + testImplementation externalDependency.mockito + testImplementation externalDependency.mockitoInline } compileTestJava.dependsOn tasks.getByPath(':entity-registry:custom-test-model:modelDeploy') diff --git a/entity-registry/custom-test-model/build.gradle b/entity-registry/custom-test-model/build.gradle index 90f50fe1f2992..778e2e42b95c4 100644 --- 
a/entity-registry/custom-test-model/build.gradle +++ b/entity-registry/custom-test-model/build.gradle @@ -23,11 +23,11 @@ if (project.hasProperty('projVersion')) { dependencies { - compile spec.product.pegasus.data + implementation spec.product.pegasus.data // Uncomment these if you want to depend on models defined in core datahub - //compile project(':li-utils') + //implementation project(':li-utils') //dataModel project(':li-utils') - //compile project(':metadata-models') + //implementation project(':metadata-models') //dataModel project(':metadata-models') } diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index ec991f9aa12cb..98debb84d51de 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.9.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.2-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/ingestion-scheduler/build.gradle b/ingestion-scheduler/build.gradle index b15b5b8c52673..dc9887406b8b4 100644 --- a/ingestion-scheduler/build.gradle +++ b/ingestion-scheduler/build.gradle @@ -1,16 +1,17 @@ apply plugin: 'java' dependencies { - compile project(path: ':metadata-models') - compile project(path: ':metadata-io') - compile project(path: ':metadata-service:restli-client') - compile project(':metadata-service:configuration') + implementation project(path: ':metadata-models') + implementation project(path: ':metadata-io') + implementation project(path: ':metadata-service:restli-client') + implementation project(':metadata-service:configuration') + implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testCompile externalDependency.mockito - testCompile externalDependency.testng + testImplementation externalDependency.mockito + testImplementation externalDependency.testng constraints { implementation(externalDependency.log4jCore) { diff --git a/li-utils/build.gradle b/li-utils/build.gradle index d11cd86659605..e8b672a3a21fa 100644 --- a/li-utils/build.gradle +++ b/li-utils/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'java' +apply plugin: 'java-library' apply plugin: 'pegasus' tasks.withType(JavaCompile).configureEach { @@ -13,19 +13,21 @@ tasks.withType(Test).configureEach { } dependencies { - compile spec.product.pegasus.data - compile externalDependency.commonsLang - compile(externalDependency.reflections) { + api spec.product.pegasus.data + implementation externalDependency.commonsLang + implementation(externalDependency.reflections) { exclude group: 'com.google.guava', module: 'guava' } - compile externalDependency.guava + implementation externalDependency.guava implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testCompile externalDependency.assertJ - testCompile project(':test-models') + testImplementation externalDependency.assertJ + testImplementation externalDependency.commonsIo + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') } idea { diff --git a/metadata-auth/auth-api/build.gradle b/metadata-auth/auth-api/build.gradle index f82f488b6f182..2bf9e5243e152 100644 --- a/metadata-auth/auth-api/build.gradle +++ b/metadata-auth/auth-api/build.gradle @@ 
-3,7 +3,7 @@ plugins { } apply plugin: 'com.github.johnrengelman.shadow' -apply plugin: 'java' +apply plugin: 'java-library' apply plugin: 'signing' apply plugin: 'maven-publish' apply plugin: 'io.codearte.nexus-staging' @@ -28,14 +28,14 @@ shadowJar { dependencies() { implementation spec.product.pegasus.data implementation project(path: ':li-utils') - implementation project(path: ':metadata-utils') + api project(path: ':metadata-utils') - compile externalDependency.guava - compile externalDependency.lombok + implementation externalDependency.guava + implementation externalDependency.lombok annotationProcessor externalDependency.lombok - - testCompile externalDependency.testng + + testImplementation externalDependency.testng } task sourcesJar(type: Jar) { diff --git a/metadata-dao-impl/kafka-producer/build.gradle b/metadata-dao-impl/kafka-producer/build.gradle index 6b08ac50a4c17..393b10b0e9d24 100644 --- a/metadata-dao-impl/kafka-producer/build.gradle +++ b/metadata-dao-impl/kafka-producer/build.gradle @@ -1,20 +1,23 @@ apply plugin: 'java' dependencies { - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-events:mxe-registration') - compile project(':metadata-events:mxe-utils-avro-1.7') - compile project(':entity-registry') - compile project(':metadata-io') + implementation project(':metadata-events:mxe-avro-1.7') + implementation project(':metadata-events:mxe-registration') + implementation project(':metadata-events:mxe-utils-avro-1.7') + implementation project(':entity-registry') + implementation project(':metadata-io') - compile externalDependency.kafkaClients + implementation externalDependency.kafkaClients + implementation externalDependency.springBeans + implementation externalDependency.springContext + implementation externalDependency.opentelemetryAnnotations implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testCompile externalDependency.mockito + testImplementation externalDependency.mockito constraints { implementation(externalDependency.log4jCore) { diff --git a/metadata-events/mxe-avro-1.7/build.gradle b/metadata-events/mxe-avro-1.7/build.gradle index 6bde1511bf280..e30406644913c 100644 --- a/metadata-events/mxe-avro-1.7/build.gradle +++ b/metadata-events/mxe-avro-1.7/build.gradle @@ -3,11 +3,11 @@ configurations { } apply plugin: 'io.acryl.gradle.plugin.avro' -apply plugin: 'java' +apply plugin: 'java-library' dependencies { - compile externalDependency.avro_1_7 - compile(externalDependency.avroCompiler_1_7) { + api externalDependency.avro_1_7 + implementation(externalDependency.avroCompiler_1_7) { exclude group: 'org.apache.velocity', module: 'velocity' } constraints { diff --git a/metadata-events/mxe-registration/build.gradle b/metadata-events/mxe-registration/build.gradle index aa5fad09f3fec..60e0da59616d9 100644 --- a/metadata-events/mxe-registration/build.gradle +++ b/metadata-events/mxe-registration/build.gradle @@ -5,11 +5,12 @@ configurations { } dependencies { - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-models') - compile spec.product.pegasus.dataAvro1_6 + implementation project(':metadata-events:mxe-avro-1.7') + implementation project(':metadata-models') + implementation spec.product.pegasus.dataAvro1_6 - testCompile project(':test-models') + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') avroOriginal project(path: ':metadata-models', 
configuration: 'avroSchema') diff --git a/metadata-events/mxe-schemas/build.gradle b/metadata-events/mxe-schemas/build.gradle index 0b3e621b8db15..fe46601fb68b7 100644 --- a/metadata-events/mxe-schemas/build.gradle +++ b/metadata-events/mxe-schemas/build.gradle @@ -11,6 +11,10 @@ task copyMetadataModels(type: Copy) { } generateAvroSchema.dependsOn copyMetadataModels +validateSchemaAnnotation.dependsOn copyMetadataModels +mainTranslateSchemas.dependsOn copyMetadataModels +generateDataTemplate.dependsOn copyMetadataModels +mainCopySchemas.dependsOn copyMetadataModels pegasus.main.generationModes = [PegasusGenerationMode.PEGASUS, PegasusGenerationMode.AVRO] task copyOriginalAvsc(type: Copy, dependsOn: generateAvroSchema) { diff --git a/metadata-events/mxe-utils-avro-1.7/build.gradle b/metadata-events/mxe-utils-avro-1.7/build.gradle index f8474e21daa0b..82249d393578c 100644 --- a/metadata-events/mxe-utils-avro-1.7/build.gradle +++ b/metadata-events/mxe-utils-avro-1.7/build.gradle @@ -1,11 +1,12 @@ -apply plugin: 'java' +apply plugin: 'java-library' dependencies { - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-models') - compile spec.product.pegasus.dataAvro1_6 + api project(':metadata-events:mxe-avro-1.7') + api project(':metadata-models') + api spec.product.pegasus.dataAvro1_6 - testCompile project(':test-models') + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') constraints { implementation(externalDependency.log4jCore) { diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index 82273427974af..e304bb5329c62 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -26,12 +26,11 @@ tasks.withType(Test).configureEach { } dependencies { - implementation project(':metadata-models') implementation(externalDependency.kafkaAvroSerializer) { exclude group: "org.apache.avro" } - compile externalDependency.avro_1_7 + implementation externalDependency.avro_1_7 constraints { implementation('commons-collections:commons-collections:3.2.2') { because 'Vulnerability Issue' @@ -48,12 +47,14 @@ dependencies { implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testCompile externalDependency.mockito - testCompile externalDependency.mockServer - testCompile externalDependency.mockServerClient - testCompile externalDependency.testContainers - testCompile externalDependency.httpAsyncClient - testRuntime externalDependency.logbackClassic + // VisibleForTesting + compileOnly externalDependency.guava + testImplementation externalDependency.mockito + testImplementation externalDependency.mockServer + testImplementation externalDependency.mockServerClient + testImplementation externalDependency.testContainers + testImplementation externalDependency.httpAsyncClient + testRuntimeOnly externalDependency.logbackClassic swaggerCodegen 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.33' } @@ -139,6 +140,7 @@ checkShadowJar { assemble { dependsOn shadowJar } +compileTestJava.dependsOn shadowJar task sourcesJar(type: Jar) { archiveClassifier = 'sources' @@ -231,6 +233,7 @@ tasks.register('generateOpenApiPojos', GenerateSwaggerCode) { } compileJava.dependsOn generateOpenApiPojos +processResources.dependsOn generateOpenApiPojos sourceSets.main.java.srcDir 
"${generateOpenApiPojos.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" diff --git a/metadata-integration/java/datahub-protobuf/build.gradle b/metadata-integration/java/datahub-protobuf/build.gradle index fa33e6baab534..bc919119f8fac 100644 --- a/metadata-integration/java/datahub-protobuf/build.gradle +++ b/metadata-integration/java/datahub-protobuf/build.gradle @@ -30,6 +30,7 @@ dependencies { implementation project(':metadata-models') implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') + implementation externalDependency.guava implementation externalDependency.protobuf implementation externalDependency.jgrapht implementation externalDependency.gson diff --git a/metadata-integration/java/examples/build.gradle b/metadata-integration/java/examples/build.gradle index b9e8e253dc359..581e9f82da0dc 100644 --- a/metadata-integration/java/examples/build.gradle +++ b/metadata-integration/java/examples/build.gradle @@ -24,7 +24,7 @@ dependencies { implementation project(path: ':li-utils') implementation project(path: ':metadata-models') - compile project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') + implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') implementation externalDependency.httpAsyncClient // Tests need a concrete log4j available. Providing it here diff --git a/metadata-integration/java/spark-lineage/build.gradle b/metadata-integration/java/spark-lineage/build.gradle index 7257cf0aabc35..7143ac4833143 100644 --- a/metadata-integration/java/spark-lineage/build.gradle +++ b/metadata-integration/java/spark-lineage/build.gradle @@ -145,7 +145,7 @@ assemble { dependsOn shadowJar } -task integrationTest(type: Exec, dependsOn: [shadowJar, ':docker:quickstart'] ) { +task integrationTest(type: Exec, dependsOn: [shadowJar, ':docker:quickstartSlim'] ) { environment "RUN_QUICKSTART", "false" commandLine "spark-smoke-test/smoke.sh" } diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index 507351f933cf0..e8ef0b3d6819d 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -1,4 +1,4 @@ -apply plugin: 'java' +apply plugin: 'java-library' apply plugin: 'org.hidetake.swagger.generator' configurations { @@ -6,64 +6,69 @@ configurations { } dependencies { + implementation project(':entity-registry') + api project(':metadata-utils') + api project(':metadata-events:mxe-avro-1.7') + api project(':metadata-events:mxe-registration') + api project(':metadata-events:mxe-utils-avro-1.7') + api project(':metadata-models') + api project(':metadata-service:restli-client') + api project(':metadata-service:configuration') + api project(':metadata-service:services') + + implementation spec.product.pegasus.data + implementation spec.product.pegasus.generator + + implementation externalDependency.guava + implementation externalDependency.reflections implementation externalDependency.jsonPatch - compile project(':entity-registry') - compile project(':metadata-utils') - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-events:mxe-registration') - compile project(':metadata-events:mxe-utils-avro-1.7') - compile project(':metadata-models') - compile project(':metadata-service:restli-client') - compile project(':metadata-service:configuration') - compile project(':metadata-service:services') - - compile spec.product.pegasus.data - compile spec.product.pegasus.generator - - compile 
externalDependency.dgraph4j exclude group: 'com.google.guava', module: 'guava' + api externalDependency.dgraph4j exclude group: 'com.google.guava', module: 'guava' implementation externalDependency.slf4jApi - testImplementation project(':metadata-integration:java:datahub-client') - runtime externalDependency.logbackClassic + runtimeOnly externalDependency.logbackClassic compileOnly externalDependency.lombok implementation externalDependency.commonsCollections - compile externalDependency.datastaxOssNativeProtocol - compile externalDependency.datastaxOssCore - compile externalDependency.datastaxOssQueryBuilder - compile externalDependency.elasticSearchRest - compile externalDependency.elasticSearchTransport - compile externalDependency.javatuples - compile externalDependency.javaxValidation + api externalDependency.datastaxOssNativeProtocol + api externalDependency.datastaxOssCore + api externalDependency.datastaxOssQueryBuilder + api externalDependency.elasticSearchRest + api externalDependency.elasticSearchTransport + implementation externalDependency.javatuples + api externalDependency.javaxValidation runtimeOnly externalDependency.jna - compile externalDependency.kafkaClients - compile externalDependency.ebean + api externalDependency.kafkaClients + api externalDependency.ebean enhance externalDependency.ebeanAgent - compile externalDependency.opentelemetryAnnotations - compile externalDependency.resilience4j - compile externalDependency.springContext - compile externalDependency.swaggerAnnotations + implementation externalDependency.opentelemetryAnnotations + implementation externalDependency.resilience4j + api externalDependency.springContext + implementation externalDependency.swaggerAnnotations swaggerCodegen 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.33' - compile (externalDependency.mixpanel) { + implementation(externalDependency.mixpanel) { exclude group: 'org.json', module: 'json' } annotationProcessor externalDependency.lombok - testCompile externalDependency.testng - testCompile externalDependency.h2 - testCompile externalDependency.mysqlConnector - testCompile externalDependency.neo4jHarness - testCompile externalDependency.mockito - testCompile externalDependency.mockitoInline - testCompile externalDependency.iStackCommons - testCompile externalDependency.resilience4j - testCompile externalDependency.testContainers - testCompile externalDependency.testContainersJunit - testCompile externalDependency.testContainersElasticsearch - testCompile externalDependency.testContainersCassandra - testCompile externalDependency.lombok - testCompile project(':test-models') - testImplementation externalDependency.springBootTest + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') testImplementation project(':datahub-graphql-core') + testImplementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') + testImplementation externalDependency.testng + testImplementation externalDependency.h2 + testImplementation externalDependency.mysqlConnector + testImplementation externalDependency.neo4jHarness + testImplementation externalDependency.mockito + testImplementation externalDependency.mockitoInline + testImplementation externalDependency.iStackCommons + testImplementation externalDependency.resilience4j + testImplementation externalDependency.testContainers + testImplementation externalDependency.testContainersJunit + testImplementation 
externalDependency.testContainersElasticsearch + testImplementation externalDependency.testContainersCassandra + testImplementation externalDependency.lombok + testImplementation externalDependency.springBootTest + testImplementation spec.product.pegasus.restliServer + // logback >=1.3 required due to `testcontainers` only testImplementation 'ch.qos.logback:logback-classic:1.4.7' @@ -135,6 +140,7 @@ tasks.register('generateOpenApiPojos', GenerateSwaggerCode) { } compileJava.dependsOn generateOpenApiPojos +processResources.dependsOn generateOpenApiPojos sourceSets.main.java.srcDir "${generateOpenApiPojos.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" diff --git a/metadata-jobs/mae-consumer-job/build.gradle b/metadata-jobs/mae-consumer-job/build.gradle index 3811a9537ac24..ca099eea5a8a3 100644 --- a/metadata-jobs/mae-consumer-job/build.gradle +++ b/metadata-jobs/mae-consumer-job/build.gradle @@ -11,22 +11,27 @@ ext { } dependencies { + implementation project(':metadata-service:factories') implementation project(':metadata-jobs:mae-consumer') // TODO: Extract PE consumer into separate pod. implementation project(':metadata-jobs:pe-consumer') + implementation(externalDependency.springBootStarterWeb) { exclude module: "spring-boot-starter-tomcat" } implementation externalDependency.springBootStarterJetty implementation externalDependency.springKafka + implementation externalDependency.springBootAutoconfigure + implementation externalDependency.springActuator implementation externalDependency.slf4jApi implementation externalDependency.log4j2Api compileOnly externalDependency.lombok implementation externalDependency.logbackClassic + testImplementation project(':metadata-dao-impl:kafka-producer') testImplementation externalDependency.springBootTest - testCompile externalDependency.mockito - testCompile externalDependency.testng + testImplementation externalDependency.mockito + testImplementation externalDependency.testng } bootJar { diff --git a/metadata-jobs/mae-consumer/build.gradle b/metadata-jobs/mae-consumer/build.gradle index 26b3d82b8570a..69fe2255a6916 100644 --- a/metadata-jobs/mae-consumer/build.gradle +++ b/metadata-jobs/mae-consumer/build.gradle @@ -11,40 +11,41 @@ configurations { dependencies { avro project(path: ':metadata-models', configuration: 'avroSchema') - compile project(':li-utils') - compile (project(':metadata-service:factories')) { + implementation project(':li-utils') + implementation(project(':metadata-service:factories')) { exclude group: 'org.neo4j.test' } - compile project(':metadata-service:auth-config') - compile project(':metadata-service:restli-client') - compile project(':metadata-io') - compile project(':ingestion-scheduler') - compile project(':metadata-utils') - compile project(":entity-registry") - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-events:mxe-registration') - compile project(':metadata-events:mxe-utils-avro-1.7') + implementation project(':metadata-service:auth-config') + implementation project(':metadata-service:restli-client') + implementation project(':metadata-io') + implementation project(':ingestion-scheduler') + implementation project(':metadata-utils') + implementation project(":entity-registry") + implementation project(':metadata-events:mxe-avro-1.7') + implementation project(':metadata-events:mxe-registration') + implementation project(':metadata-events:mxe-utils-avro-1.7') + implementation project(':datahub-graphql-core') - compile 
externalDependency.elasticSearchRest - compile externalDependency.kafkaAvroSerde + implementation externalDependency.elasticSearchRest + implementation externalDependency.kafkaAvroSerde implementation externalDependency.protobuf - compile externalDependency.neo4jJavaDriver + implementation externalDependency.neo4jJavaDriver - compile externalDependency.springKafka - compile externalDependency.springActuator + implementation externalDependency.springKafka + implementation externalDependency.springActuator implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - runtime externalDependency.logbackClassic + runtimeOnly externalDependency.logbackClassic - testCompile externalDependency.mockito + testImplementation externalDependency.mockito implementation externalDependency.awsMskIamAuth testImplementation externalDependency.springBootTest - testRuntime externalDependency.logbackClassic + testRuntimeOnly externalDependency.logbackClassic } task avroSchemaSources(type: Copy) { diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index 2229c387f3676..b72d4baff23d6 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ b/metadata-jobs/mce-consumer-job/build.gradle @@ -21,6 +21,8 @@ dependencies { } implementation externalDependency.springBootStarterJetty implementation externalDependency.springKafka + implementation externalDependency.springBootAutoconfigure + implementation externalDependency.springActuator implementation spec.product.pegasus.restliDocgen implementation spec.product.pegasus.restliSpringBridge implementation externalDependency.slf4jApi @@ -28,15 +30,16 @@ dependencies { compileOnly externalDependency.lombok implementation externalDependency.logbackClassic - runtime externalDependency.mariadbConnector - runtime externalDependency.mysqlConnector - runtime externalDependency.postgresql + runtimeOnly externalDependency.mariadbConnector + runtimeOnly externalDependency.mysqlConnector + runtimeOnly externalDependency.postgresql annotationProcessor externalDependency.lombok + testImplementation project(':metadata-dao-impl:kafka-producer') testImplementation externalDependency.springBootTest - testCompile externalDependency.mockito - testCompile externalDependency.testng + testImplementation externalDependency.mockito + testImplementation externalDependency.testng } bootJar { diff --git a/metadata-jobs/mce-consumer/build.gradle b/metadata-jobs/mce-consumer/build.gradle index 467d1dbdd3717..0bca55e0e5f92 100644 --- a/metadata-jobs/mce-consumer/build.gradle +++ b/metadata-jobs/mce-consumer/build.gradle @@ -11,24 +11,24 @@ configurations { dependencies { avro project(path: ':metadata-models', configuration: 'avroSchema') - compile project(':li-utils') - compile (project(':metadata-service:factories')) { + implementation project(':li-utils') + implementation(project(':metadata-service:factories')) { exclude group: 'org.neo4j.test' } - compile project(':metadata-utils') - compile project(':metadata-events:mxe-schemas') - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-events:mxe-registration') - compile project(':metadata-events:mxe-utils-avro-1.7') - compile project(':metadata-io') - compile project(':metadata-service:restli-client') - compile spec.product.pegasus.restliClient - compile spec.product.pegasus.restliCommon - compile externalDependency.elasticSearchRest + implementation project(':metadata-utils') + implementation 
project(':metadata-events:mxe-schemas') + implementation project(':metadata-events:mxe-avro-1.7') + implementation project(':metadata-events:mxe-registration') + implementation project(':metadata-events:mxe-utils-avro-1.7') + implementation project(':metadata-io') + implementation project(':metadata-service:restli-client') + implementation spec.product.pegasus.restliClient + implementation spec.product.pegasus.restliCommon + implementation externalDependency.elasticSearchRest implementation externalDependency.protobuf - compile externalDependency.springKafka - compile externalDependency.springActuator + implementation externalDependency.springKafka + implementation externalDependency.springActuator implementation externalDependency.slf4jApi compileOnly externalDependency.lombok diff --git a/metadata-jobs/pe-consumer/build.gradle b/metadata-jobs/pe-consumer/build.gradle index 517b021353f9d..1899a4de15635 100644 --- a/metadata-jobs/pe-consumer/build.gradle +++ b/metadata-jobs/pe-consumer/build.gradle @@ -9,21 +9,21 @@ configurations { dependencies { avro project(path: ':metadata-models', configuration: 'avroSchema') - compile project(':li-utils') - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-events:mxe-registration') - compile project(':metadata-events:mxe-utils-avro-1.7') - compile (project(':metadata-service:factories')) { + implementation project(':li-utils') + implementation project(':metadata-events:mxe-avro-1.7') + implementation project(':metadata-events:mxe-registration') + implementation project(':metadata-events:mxe-utils-avro-1.7') + implementation(project(':metadata-service:factories')) { exclude group: 'org.neo4j.test' } - compile externalDependency.springKafka - compile externalDependency.springActuator + implementation externalDependency.springKafka + implementation externalDependency.springActuator implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - runtime externalDependency.logbackClassic - testCompile externalDependency.mockito - testRuntime externalDependency.logbackClassic + runtimeOnly externalDependency.logbackClassic + testImplementation externalDependency.mockito + testRuntimeOnly externalDependency.logbackClassic } task avroSchemaSources(type: Copy) { diff --git a/metadata-models-custom/build.gradle b/metadata-models-custom/build.gradle index 4af866502f5dc..95a00766039a8 100644 --- a/metadata-models-custom/build.gradle +++ b/metadata-models-custom/build.gradle @@ -11,10 +11,10 @@ buildscript { plugins { id 'base' + id 'maven-publish' } apply plugin: 'pegasus' - if (project.hasProperty('projVersion')) { project.version = project.projVersion } else { @@ -23,11 +23,11 @@ if (project.hasProperty('projVersion')) { dependencies { - compile spec.product.pegasus.data + implementation spec.product.pegasus.data // Uncomment these if you want to depend on models defined in core datahub - //compile project(':li-utils') + //implementation project(':li-utils') //dataModel project(':li-utils') - //compile project(':metadata-models') + //implementation project(':metadata-models') //dataModel project(':metadata-models') } @@ -69,6 +69,6 @@ task modelDeploy(type: Copy) { modelDeploy.dependsOn modelArtifact -install.dependsOn modelDeploy +publish.dependsOn modelDeploy diff --git a/metadata-models-validator/build.gradle b/metadata-models-validator/build.gradle index bd1ec9449fb19..c8d1d2e6651d6 100644 --- a/metadata-models-validator/build.gradle +++ 
b/metadata-models-validator/build.gradle @@ -1,13 +1,13 @@ apply plugin: 'java' dependencies { - compile project(":entity-registry") - compile spec.product.pegasus.data - compile spec.product.pegasus.generator + implementation project(":entity-registry") + implementation spec.product.pegasus.data + implementation spec.product.pegasus.generator - compile externalDependency.commonsIo - compile externalDependency.findbugsAnnotations - compile externalDependency.guava + implementation externalDependency.commonsIo + implementation externalDependency.findbugsAnnotations + implementation externalDependency.guava implementation externalDependency.slf4jApi runtimeOnly externalDependency.logbackClassic diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index 432823852a263..2e8efae9b7bce 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -1,6 +1,6 @@ import io.datahubproject.GenerateJsonSchemaTask - +apply plugin: 'java-library' apply plugin: 'pegasus' tasks.withType(JavaCompile).configureEach { @@ -15,16 +15,16 @@ tasks.withType(Test).configureEach { } dependencies { - compile spec.product.pegasus.data + api spec.product.pegasus.data constraints { implementation('org.apache.commons:commons-text:1.10.0') { because 'Vulnerability Issue' } } - compile project(':li-utils') + api project(':li-utils') dataModel project(':li-utils') - testCompile externalDependency.guava + testImplementation externalDependency.guava } mainAvroSchemaJar.dependsOn generateAvroSchema diff --git a/metadata-service/auth-config/build.gradle b/metadata-service/auth-config/build.gradle index 2e9210804bed9..c7a1128897dd5 100644 --- a/metadata-service/auth-config/build.gradle +++ b/metadata-service/auth-config/build.gradle @@ -1,9 +1,9 @@ apply plugin: 'java' dependencies { - compile project(path: ':metadata-models') - compile project(path: ':metadata-auth:auth-api') - compile externalDependency.guava + implementation project(path: ':metadata-models') + implementation project(path: ':metadata-auth:auth-api') + implementation externalDependency.guava implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok diff --git a/metadata-service/auth-filter/build.gradle b/metadata-service/auth-filter/build.gradle index 2c77850209205..2dd07ef10274c 100644 --- a/metadata-service/auth-filter/build.gradle +++ b/metadata-service/auth-filter/build.gradle @@ -1,15 +1,17 @@ apply plugin: 'java' dependencies { - compile project(':metadata-auth:auth-api'); - compile project(path: ':metadata-service:auth-config') - compile project(path: ':metadata-service:factories') + implementation project(':metadata-auth:auth-api') + implementation project(':metadata-service:auth-impl') + implementation project(path: ':metadata-service:auth-config') + implementation project(path: ':metadata-service:factories') - compile externalDependency.servletApi + implementation externalDependency.servletApi implementation externalDependency.slf4jApi compileOnly externalDependency.lombok - compile externalDependency.springWeb + implementation externalDependency.springWeb + implementation externalDependency.guice annotationProcessor externalDependency.lombok - testCompile externalDependency.mockito + testImplementation externalDependency.mockito } \ No newline at end of file diff --git a/metadata-service/auth-impl/build.gradle b/metadata-service/auth-impl/build.gradle index aefbf81577a9b..1ffeb99e7ad4a 100644 --- a/metadata-service/auth-impl/build.gradle +++ 
b/metadata-service/auth-impl/build.gradle @@ -6,11 +6,14 @@ compileJava { } dependencies { - compile project(path: ':metadata-models') - compile project(path: ':metadata-auth:auth-api') - compile project(path: ':metadata-service:auth-config') - compile project(path: ':metadata-io') - + implementation project(path: ':metadata-models') + implementation project(path: ':metadata-auth:auth-api') + implementation project(path: ':metadata-service:auth-config') + implementation project(path: ':metadata-io') + + implementation(externalDependency.mixpanel) { + exclude group: 'org.json', module: 'json' + } implementation 'io.jsonwebtoken:jjwt-api:0.11.2' runtimeOnly 'io.jsonwebtoken:jjwt-impl:0.11.2', 'io.jsonwebtoken:jjwt-jackson:0.11.2' @@ -20,6 +23,5 @@ dependencies { annotationProcessor externalDependency.lombok - testCompile externalDependency.mockito - + testImplementation externalDependency.mockito } \ No newline at end of file diff --git a/metadata-service/auth-servlet-impl/build.gradle b/metadata-service/auth-servlet-impl/build.gradle index 3338f3a5c6b94..7945b3b4e9a06 100644 --- a/metadata-service/auth-servlet-impl/build.gradle +++ b/metadata-service/auth-servlet-impl/build.gradle @@ -1,15 +1,17 @@ apply plugin: 'java' dependencies { - compile project(':metadata-auth:auth-api') - compile project(':metadata-service:factories') + implementation project(':metadata-auth:auth-api') + implementation project(':metadata-service:auth-impl') + implementation project(':metadata-service:factories') - compile externalDependency.springCore - compile externalDependency.springWeb - compile externalDependency.springWebMVC - compile externalDependency.graphqlJava - compile externalDependency.springBeans - compile externalDependency.springContext + implementation externalDependency.springCore + implementation externalDependency.springWeb + implementation externalDependency.springWebMVC + implementation externalDependency.graphqlJava + implementation externalDependency.springBeans + implementation externalDependency.springContext + implementation externalDependency.guice implementation externalDependency.slf4jApi compileOnly externalDependency.lombok diff --git a/metadata-service/configuration/build.gradle b/metadata-service/configuration/build.gradle index 8623e53d2554a..30fa3079d29a4 100644 --- a/metadata-service/configuration/build.gradle +++ b/metadata-service/configuration/build.gradle @@ -3,7 +3,7 @@ plugins { } dependencies { - compile externalDependency.jacksonDataBind + implementation externalDependency.jacksonDataBind implementation externalDependency.slf4jApi implementation externalDependency.springCore diff --git a/metadata-service/factories/build.gradle b/metadata-service/factories/build.gradle index 8e9b859e3b136..f848a5e339781 100644 --- a/metadata-service/factories/build.gradle +++ b/metadata-service/factories/build.gradle @@ -1,54 +1,58 @@ -apply plugin: 'java' +apply plugin: 'java-library' apply from: "../../gradle/versioning/versioning.gradle" dependencies { - compile project(':metadata-io') - compile project(':metadata-utils') - compile project(':metadata-service:auth-impl') - compile project(':metadata-service:auth-config') - compile project(':metadata-service:plugin') - compile project(':metadata-service:configuration') - compile project(':datahub-graphql-core') - compile project(':metadata-service:restli-servlet-impl') - compile project(':metadata-dao-impl:kafka-producer') - compile project(':ingestion-scheduler') + api project(':metadata-io') + api project(':metadata-utils') + 
implementation project(':metadata-service:auth-impl') + api project(':metadata-service:auth-config') + api project(':metadata-service:plugin') + api project(':metadata-service:configuration') + implementation project(':datahub-graphql-core') + implementation project(':metadata-service:restli-servlet-impl') + implementation project(':metadata-dao-impl:kafka-producer') + implementation project(':ingestion-scheduler') - compile (externalDependency.awsGlueSchemaRegistrySerde) { + implementation (externalDependency.awsGlueSchemaRegistrySerde) { exclude group: 'org.json', module: 'json' } - compile externalDependency.elasticSearchRest - compile externalDependency.httpClient - compile externalDependency.gson + implementation externalDependency.elasticSearchRest + implementation externalDependency.httpClient + implementation externalDependency.gson implementation (externalDependency.hazelcast) { exclude group: 'org.json', module: 'json' } - compile externalDependency.hazelcastSpring - compile externalDependency.kafkaClients - compile externalDependency.kafkaAvroSerde + implementation externalDependency.hazelcastSpring + implementation externalDependency.kafkaClients + implementation externalDependency.kafkaAvroSerde compileOnly externalDependency.lombok - compile externalDependency.servletApi - compile externalDependency.springBeans - compile externalDependency.springBootAutoconfigure - compile externalDependency.springBootStarterCache - compile externalDependency.springContext - compile externalDependency.springCore - compile externalDependency.springKafka - compile externalDependency.springWeb + implementation externalDependency.servletApi + api externalDependency.springBeans + implementation externalDependency.springBootAutoconfigure + implementation externalDependency.springBootStarterCache + api externalDependency.springContext + api externalDependency.springCore + api externalDependency.springKafka + api externalDependency.springWeb implementation externalDependency.awsPostgresIamAuth implementation externalDependency.awsRds + implementation(externalDependency.mixpanel) { + exclude group: 'org.json', module: 'json' + } annotationProcessor externalDependency.lombok - compile spec.product.pegasus.restliSpringBridge + implementation spec.product.pegasus.restliSpringBridge implementation spec.product.pegasus.restliDocgen + implementation externalDependency.jline + implementation externalDependency.common testImplementation externalDependency.springBootTest + testImplementation externalDependency.mockito + testImplementation externalDependency.testng + testImplementation externalDependency.hazelcastTest + testImplementation externalDependency.javatuples - testCompile externalDependency.mockito - testCompile externalDependency.testng - testCompile externalDependency.hazelcastTest - implementation externalDependency.jline - implementation externalDependency.common constraints { implementation(externalDependency.snappy) { diff --git a/metadata-service/graphql-servlet-impl/build.gradle b/metadata-service/graphql-servlet-impl/build.gradle index ff64f9a8a8233..52fd20ef32389 100644 --- a/metadata-service/graphql-servlet-impl/build.gradle +++ b/metadata-service/graphql-servlet-impl/build.gradle @@ -1,16 +1,19 @@ apply plugin: 'java' dependencies { - compile project(':datahub-graphql-core') - compile project(':metadata-auth:auth-api') - compile project(':metadata-service:factories') + implementation project(':datahub-graphql-core') + implementation project(':metadata-auth:auth-api') + implementation 
project(':metadata-service:auth-impl') + implementation project(':metadata-service:factories') - compile externalDependency.springCore - compile externalDependency.springWeb - compile externalDependency.springWebMVC - compile externalDependency.graphqlJava - compile externalDependency.springBeans - compile externalDependency.springContext + implementation externalDependency.servletApi + implementation externalDependency.springCore + implementation externalDependency.springWeb + implementation externalDependency.springWebMVC + implementation externalDependency.graphqlJava + implementation externalDependency.springBeans + implementation externalDependency.springContext + implementation externalDependency.guice implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok diff --git a/metadata-service/health-servlet/build.gradle b/metadata-service/health-servlet/build.gradle index 3237c56779ada..6095f724b3cd4 100644 --- a/metadata-service/health-servlet/build.gradle +++ b/metadata-service/health-servlet/build.gradle @@ -2,16 +2,17 @@ apply plugin: 'java' dependencies { - compile project(':metadata-service:factories') + implementation project(':metadata-service:factories') - compile externalDependency.reflections - compile externalDependency.springBoot - compile externalDependency.springCore - compile externalDependency.springDocUI - compile externalDependency.springWeb - compile externalDependency.springWebMVC - compile externalDependency.springBeans - compile externalDependency.springContext + implementation externalDependency.guava + implementation externalDependency.reflections + implementation externalDependency.springBoot + implementation externalDependency.springCore + implementation externalDependency.springDocUI + implementation externalDependency.springWeb + implementation externalDependency.springWebMVC + implementation externalDependency.springBeans + implementation externalDependency.springContext implementation externalDependency.slf4jApi compileOnly externalDependency.lombok implementation externalDependency.antlr4Runtime diff --git a/metadata-service/health-servlet/src/main/java/com/datahub/health/controller/HealthCheckController.java b/metadata-service/health-servlet/src/main/java/com/datahub/health/controller/HealthCheckController.java index 45edcb2a6a5d9..02ca5182cd2be 100644 --- a/metadata-service/health-servlet/src/main/java/com/datahub/health/controller/HealthCheckController.java +++ b/metadata-service/health-servlet/src/main/java/com/datahub/health/controller/HealthCheckController.java @@ -10,6 +10,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; + import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.RequestOptions; diff --git a/metadata-service/openapi-servlet/build.gradle b/metadata-service/openapi-servlet/build.gradle index 7cd022f97247c..1909b4862d294 100644 --- a/metadata-service/openapi-servlet/build.gradle +++ b/metadata-service/openapi-servlet/build.gradle @@ -2,36 +2,38 @@ apply plugin: 'java' dependencies { - compile project(':metadata-auth:auth-api') - compile project(':metadata-service:factories') - compile project(':metadata-service:schema-registry-api') + implementation project(':metadata-auth:auth-api') + implementation project(':metadata-service:auth-impl') + implementation project(':metadata-service:factories') 
+ implementation project(':metadata-service:schema-registry-api')
- compile externalDependency.reflections
- compile externalDependency.springBoot
- compile externalDependency.springCore
- compile(externalDependency.springDocUI) {
+ implementation externalDependency.reflections
+ implementation externalDependency.springBoot
+ implementation externalDependency.springCore
+ implementation(externalDependency.springDocUI) {
 exclude group: 'org.springframework.boot'
 }
- compile externalDependency.springWeb
- compile externalDependency.springWebMVC
- compile externalDependency.springBeans
- compile externalDependency.springContext
+ implementation externalDependency.springWeb
+ implementation externalDependency.springWebMVC
+ implementation externalDependency.springBeans
+ implementation externalDependency.springContext
 implementation externalDependency.slf4jApi
 compileOnly externalDependency.lombok
 implementation externalDependency.antlr4Runtime
 implementation externalDependency.antlr4
+ implementation externalDependency.swaggerAnnotations
 annotationProcessor externalDependency.lombok
 testImplementation externalDependency.springBootTest
 testImplementation project(':mock-entity-registry')
- testCompile externalDependency.springBoot
- testCompile externalDependency.testContainers
- testCompile externalDependency.springKafka
- testCompile externalDependency.testng
- testCompile externalDependency.mockito
- testCompile externalDependency.logbackClassic
- testCompile externalDependency.jacksonCore
- testCompile externalDependency.jacksonDataBind
- testCompile externalDependency.springBootStarterWeb
+ testImplementation externalDependency.springBoot
+ testImplementation externalDependency.testContainers
+ testImplementation externalDependency.springKafka
+ testImplementation externalDependency.testng
+ testImplementation externalDependency.mockito
+ testImplementation externalDependency.logbackClassic
+ testImplementation externalDependency.jacksonCore
+ testImplementation externalDependency.jacksonDataBind
+ testImplementation externalDependency.springBootStarterWeb
 }
\ No newline at end of file
diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java
index 796a7774da303..1e37170f37b3b 100644
--- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java
+++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java
@@ -18,8 +18,11 @@ import com.linkedin.metadata.search.utils.QueryUtils;
 import com.linkedin.metadata.utils.metrics.MetricUtils;
 import io.datahubproject.openapi.exception.UnauthorizedException;
-import io.swagger.annotations.ApiOperation;
+import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.Parameter;
+import io.swagger.v3.oas.annotations.media.Content;
+import io.swagger.v3.oas.annotations.media.Schema;
+import io.swagger.v3.oas.annotations.responses.ApiResponse;
 import io.swagger.v3.oas.annotations.tags.Tag;
 import java.net.URLDecoder;
 import java.nio.charset.Charset;
@@ -94,7 +97,8 @@ private RelatedEntitiesResult getRelatedEntities(String rawUrn, List<String> rel
 }
 @GetMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE)
- @ApiOperation(code = 0, response = RelatedEntitiesResult.class, value = "")
+ @Operation(responses = { @ApiResponse(responseCode = "0", description = "",
+ content = @Content(schema = @Schema(implementation = RelatedEntitiesResult.class)))})
 public ResponseEntity<RelatedEntitiesResult> getRelationships(
 @Parameter(name = "urn", required = true, description = "The urn for the entity whose relationships are being queried")
diff --git a/metadata-service/plugin/src/test/sample-test-plugins/build.gradle b/metadata-service/plugin/src/test/sample-test-plugins/build.gradle
index 7d4b43402a586..f299a35db0f64 100644
--- a/metadata-service/plugin/src/test/sample-test-plugins/build.gradle
+++ b/metadata-service/plugin/src/test/sample-test-plugins/build.gradle
@@ -7,6 +7,7 @@ dependencies {
 implementation project(path: ':metadata-auth:auth-api')
 implementation externalDependency.lombok
 implementation externalDependency.logbackClassic;
+ implementation 'com.google.code.findbugs:jsr305:3.0.2'
 testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.1'
 testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.1'
diff --git a/metadata-service/restli-client/build.gradle b/metadata-service/restli-client/build.gradle
index 263d4b49197f4..45cf008d3ca7d 100644
--- a/metadata-service/restli-client/build.gradle
+++ b/metadata-service/restli-client/build.gradle
@@ -1,18 +1,19 @@
 apply plugin: 'pegasus'
+apply plugin: 'java-library'
 dependencies {
- compile project(':metadata-service:restli-api')
- compile project(':metadata-auth:auth-api')
- compile project(path: ':metadata-service:restli-api', configuration: 'restClient')
- compile project(':metadata-events:mxe-schemas')
- compile project(':metadata-utils')
+ api project(':metadata-service:restli-api')
+ api project(':metadata-auth:auth-api')
+ api project(path: ':metadata-service:restli-api', configuration: 'restClient')
+ api project(':metadata-events:mxe-schemas')
+ api project(':metadata-utils')
 implementation externalDependency.slf4jApi
 compileOnly externalDependency.lombok
 annotationProcessor externalDependency.lombok
- compile spec.product.pegasus.restliClient
+ implementation spec.product.pegasus.restliClient
- testCompile externalDependency.mockito
- testCompile externalDependency.testng
+ testImplementation externalDependency.mockito
+ testImplementation externalDependency.testng
 }
diff --git a/metadata-service/restli-servlet-impl/build.gradle b/metadata-service/restli-servlet-impl/build.gradle
index 1028f7c3dcce4..cb307863748c3 100644
--- a/metadata-service/restli-servlet-impl/build.gradle
+++ b/metadata-service/restli-servlet-impl/build.gradle
@@ -11,7 +11,7 @@ sourceSets {
 idea {
 module {
 testSourceDirs += file('src/integTest/java')
- scopes.TEST.plus += [ configurations.integTestCompile ]
+ scopes.TEST.plus += [ configurations.integTestCompileOnly ]
 }
 }
@@ -19,6 +19,10 @@ idea {
 configurations {
 integTestImplementation.extendsFrom implementation
 integTestRuntimeOnly.extendsFrom runtimeOnly
+ integTestCompileOnly {
+ extendsFrom compileOnly
+ canBeResolved = true
+ }
 modelValidation
 }
@@ -32,34 +36,37 @@ dependencies {
 }
 }
- compile project(':metadata-service:restli-api')
- compile project(':metadata-auth:auth-api')
- compile project(path: ':metadata-service:restli-api', configuration: 'dataTemplate')
- compile project(':li-utils')
- compile project(':metadata-models')
- compile project(':metadata-utils')
- compile project(':metadata-io')
- compile spec.product.pegasus.restliServer
+ implementation project(':metadata-service:restli-api')
+ implementation project(':metadata-auth:auth-api')
+ implementation project(path: ':metadata-service:restli-api', configuration: 
'dataTemplate') + implementation project(':li-utils') + implementation project(':metadata-models') + implementation project(':metadata-utils') + implementation project(':metadata-io') + implementation spec.product.pegasus.restliServer implementation externalDependency.slf4jApi - // This is compile and not compileOnly because of restli - compile externalDependency.lombok - compile externalDependency.neo4jJavaDriver - compile externalDependency.opentelemetryAnnotations + implementation externalDependency.dropwizardMetricsCore + implementation externalDependency.dropwizardMetricsJmx + + compileOnly externalDependency.lombok + implementation externalDependency.neo4jJavaDriver + implementation externalDependency.opentelemetryAnnotations runtimeOnly externalDependency.logbackClassic annotationProcessor externalDependency.lombok - testCompile project(':test-models') + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') testImplementation project(':mock-entity-registry') - testCompile externalDependency.mockito - testCompile externalDependency.testng + testImplementation externalDependency.mockito + testImplementation externalDependency.testng integTestImplementation externalDependency.junitJupiterApi integTestRuntimeOnly externalDependency.junitJupiterEngine - integTestCompile externalDependency.junitJupiterApi - integTestCompile externalDependency.junitJupiterParams + integTestCompileOnly externalDependency.junitJupiterApi + integTestCompileOnly externalDependency.junitJupiterParams modelValidation project(path: ':metadata-models-validator') dataModel project(path: ':metadata-models', configuration: 'dataTemplate') diff --git a/metadata-service/schema-registry-api/build.gradle b/metadata-service/schema-registry-api/build.gradle index e60ca7d348b5c..7bf1e558c8906 100644 --- a/metadata-service/schema-registry-api/build.gradle +++ b/metadata-service/schema-registry-api/build.gradle @@ -3,26 +3,26 @@ apply plugin: 'org.hidetake.swagger.generator' dependencies { // Dependencies for open api - compile externalDependency.reflections - compile externalDependency.springBoot - compile externalDependency.springCore - compile externalDependency.springWeb - compile externalDependency.springWebMVC - compile externalDependency.springBeans - compile externalDependency.springContext + implementation externalDependency.reflections + implementation externalDependency.springBoot + implementation externalDependency.springCore + implementation externalDependency.springWeb + implementation externalDependency.springWebMVC + implementation externalDependency.springBeans + implementation externalDependency.springContext implementation externalDependency.antlr4Runtime implementation externalDependency.antlr4 - compile externalDependency.javaxValidation - compile externalDependency.servletApi - compile group: 'javax.annotation', name: 'javax.annotation-api', version: '1.3.2' - compile externalDependency.jacksonDataBind - compile externalDependency.slf4jApi + implementation externalDependency.javaxValidation + implementation externalDependency.servletApi + implementation group: 'javax.annotation', name: 'javax.annotation-api', version: '1.3.2' + implementation externalDependency.jacksonDataBind + implementation externalDependency.slf4jApi // End of dependencies - compile externalDependency.swaggerAnnotations - swaggerCodegen 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.33' + implementation externalDependency.swaggerAnnotations + swaggerCodegen 
'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46' - testCompile externalDependency.assertJ + testImplementation externalDependency.assertJ } tasks.register('generateOpenApiPojos', GenerateSwaggerCode) { diff --git a/metadata-service/schema-registry-servlet/build.gradle b/metadata-service/schema-registry-servlet/build.gradle index ec62203ddf0c5..554ac696c94fd 100644 --- a/metadata-service/schema-registry-servlet/build.gradle +++ b/metadata-service/schema-registry-servlet/build.gradle @@ -1,19 +1,20 @@ apply plugin: 'java' dependencies { - compile project(':metadata-service:factories') - compile project(':metadata-service:schema-registry-api') + implementation project(':metadata-service:factories') + implementation project(':metadata-service:schema-registry-api') - compile externalDependency.reflections - compile externalDependency.springBoot - compile externalDependency.springCore - compile(externalDependency.springDocUI) { + implementation externalDependency.reflections + implementation externalDependency.springBoot + implementation externalDependency.springCore + implementation(externalDependency.springDocUI) { exclude group: 'org.springframework.boot' } - compile externalDependency.springWeb - compile externalDependency.springWebMVC - compile externalDependency.springBeans - compile externalDependency.springContext + implementation externalDependency.springWeb + implementation externalDependency.springWebMVC + implementation externalDependency.springBeans + implementation externalDependency.springContext + implementation externalDependency.springBootAutoconfigure implementation externalDependency.slf4jApi compileOnly externalDependency.lombok implementation externalDependency.antlr4Runtime @@ -23,14 +24,14 @@ dependencies { testImplementation externalDependency.springBootTest testImplementation project(':mock-entity-registry') - testCompile externalDependency.springBoot - testCompile externalDependency.testContainers - testCompile externalDependency.testContainersKafka - testCompile externalDependency.springKafka - testCompile externalDependency.testng - testCompile externalDependency.mockito - testCompile externalDependency.logbackClassic - testCompile externalDependency.jacksonCore - testCompile externalDependency.jacksonDataBind - testCompile externalDependency.springBootStarterWeb + testImplementation externalDependency.springBoot + testImplementation externalDependency.testContainers + testImplementation externalDependency.testContainersKafka + testImplementation externalDependency.springKafka + testImplementation externalDependency.testng + testImplementation externalDependency.mockito + testImplementation externalDependency.logbackClassic + testImplementation externalDependency.jacksonCore + testImplementation externalDependency.jacksonDataBind + testImplementation externalDependency.springBootStarterWeb } \ No newline at end of file diff --git a/metadata-service/services/build.gradle b/metadata-service/services/build.gradle index adc7b7bf09d99..99345d6f6bc3f 100644 --- a/metadata-service/services/build.gradle +++ b/metadata-service/services/build.gradle @@ -7,32 +7,33 @@ configurations { dependencies { implementation externalDependency.jsonPatch - compile project(':entity-registry') - compile project(':metadata-utils') - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-events:mxe-registration') - compile project(':metadata-events:mxe-utils-avro-1.7') - compile project(':metadata-models') - compile project(':metadata-service:restli-client') - compile 
project(':metadata-service:configuration') + implementation project(':entity-registry') + implementation project(':metadata-utils') + implementation project(':metadata-events:mxe-avro-1.7') + implementation project(':metadata-events:mxe-registration') + implementation project(':metadata-events:mxe-utils-avro-1.7') + implementation project(':metadata-models') + implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:configuration') implementation externalDependency.slf4jApi implementation externalDependency.swaggerAnnotations - runtime externalDependency.logbackClassic + runtimeOnly externalDependency.logbackClassic compileOnly externalDependency.lombok implementation externalDependency.commonsCollections - compile externalDependency.javatuples - compile externalDependency.javaxValidation - compile externalDependency.opentelemetryAnnotations + implementation externalDependency.javatuples + implementation externalDependency.javaxValidation + implementation externalDependency.opentelemetryAnnotations annotationProcessor externalDependency.lombok - testCompile externalDependency.testng - testCompile externalDependency.junit - testCompile externalDependency.mockito - testCompile externalDependency.mockitoInline + testImplementation externalDependency.testng + testImplementation externalDependency.junit + testImplementation externalDependency.mockito + testImplementation externalDependency.mockitoInline testCompileOnly externalDependency.lombok - testCompile project(':test-models') + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') testImplementation project(':datahub-graphql-core') // logback >=1.3 required due to `testcontainers` only testImplementation 'ch.qos.logback:logback-classic:1.4.7' diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java index b52d68e2e75ee..9e12fc80a3cdb 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java @@ -20,7 +20,7 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; +import com.linkedin.entity.client.EntityClient; import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; diff --git a/metadata-service/servlet/build.gradle b/metadata-service/servlet/build.gradle index 9242d21201886..eb2cd9c2d3de7 100644 --- a/metadata-service/servlet/build.gradle +++ b/metadata-service/servlet/build.gradle @@ -1,13 +1,16 @@ apply plugin: 'java' dependencies { - compile project(':metadata-io') - compile externalDependency.httpClient - compile externalDependency.servletApi - compile externalDependency.gson - compile externalDependency.jacksonDataBind - compile externalDependency.springWebMVC + implementation project(':metadata-io') + implementation project(':datahub-graphql-core') + implementation project(':entity-registry') + implementation project(':metadata-service:factories') + + implementation externalDependency.httpClient + implementation externalDependency.servletApi + implementation externalDependency.gson + implementation externalDependency.jacksonDataBind + implementation externalDependency.springWebMVC + compileOnly externalDependency.lombok 
annotationProcessor externalDependency.lombok - compile project(':entity-registry') - compile project(':metadata-service:factories') } diff --git a/metadata-service/war/build.gradle b/metadata-service/war/build.gradle index eaf14f7fd6c18..ae207e0260e60 100644 --- a/metadata-service/war/build.gradle +++ b/metadata-service/war/build.gradle @@ -12,33 +12,33 @@ ext { ext.apiProject = project(':metadata-service:restli-api') dependencies { - runtime project(':metadata-service:factories') - runtime project(':metadata-service:auth-filter') - runtime project(':metadata-service:servlet') - runtime project(':metadata-service:auth-servlet-impl') - runtime project(':metadata-service:graphql-servlet-impl') - runtime project(':metadata-service:health-servlet') - runtime project(':metadata-service:openapi-servlet') - runtime project(':metadata-service:schema-registry-servlet') - runtime project(':metadata-jobs:mce-consumer') - runtime project(':metadata-jobs:mae-consumer') - runtime project(':metadata-jobs:pe-consumer') + runtimeOnly project(':metadata-service:factories') + runtimeOnly project(':metadata-service:auth-filter') + runtimeOnly project(':metadata-service:servlet') + runtimeOnly project(':metadata-service:auth-servlet-impl') + runtimeOnly project(':metadata-service:graphql-servlet-impl') + runtimeOnly project(':metadata-service:health-servlet') + runtimeOnly project(':metadata-service:openapi-servlet') + runtimeOnly project(':metadata-service:schema-registry-servlet') + runtimeOnly project(':metadata-jobs:mce-consumer') + runtimeOnly project(':metadata-jobs:mae-consumer') + runtimeOnly project(':metadata-jobs:pe-consumer') - runtime externalDependency.awsSecretsManagerJdbc - runtime externalDependency.h2 - runtime externalDependency.mariadbConnector - runtime externalDependency.mysqlConnector - runtime externalDependency.postgresql - runtime externalDependency.springWebMVC + runtimeOnly externalDependency.awsSecretsManagerJdbc + runtimeOnly externalDependency.h2 + runtimeOnly externalDependency.mariadbConnector + runtimeOnly externalDependency.mysqlConnector + runtimeOnly externalDependency.postgresql + runtimeOnly externalDependency.springWebMVC - runtime spec.product.pegasus.restliDocgen - runtime spec.product.pegasus.restliSpringBridge + runtimeOnly spec.product.pegasus.restliDocgen + runtimeOnly spec.product.pegasus.restliSpringBridge - runtime externalDependency.log4jCore - runtime externalDependency.log4j2Api - runtime externalDependency.logbackClassic + runtimeOnly externalDependency.log4jCore + runtimeOnly externalDependency.log4j2Api + runtimeOnly externalDependency.logbackClassic implementation externalDependency.awsMskIamAuth - testRuntime externalDependency.logbackClassic + testRuntimeOnly externalDependency.logbackClassic implementation externalDependency.charle } configurations.all{ diff --git a/metadata-utils/build.gradle b/metadata-utils/build.gradle index 3b04a5dc53d75..9f8ef70a0e728 100644 --- a/metadata-utils/build.gradle +++ b/metadata-utils/build.gradle @@ -1,30 +1,31 @@ -apply plugin: 'java' +apply plugin: 'java-library' dependencies { - compile externalDependency.avro_1_7 - compile externalDependency.commonsLang - compile externalDependency.dropwizardMetricsCore - compile externalDependency.dropwizardMetricsJmx - compile externalDependency.elasticSearchRest - compile externalDependency.httpClient - compile externalDependency.neo4jJavaDriver - compile externalDependency.json - - compile spec.product.pegasus.restliClient - compile spec.product.pegasus.restliCommon - compile 
spec.product.pegasus.restliServer - - compile project(':li-utils') - compile project(':entity-registry') - compile project(':metadata-events:mxe-avro-1.7') - compile project(':metadata-events:mxe-utils-avro-1.7') + api externalDependency.avro_1_7 + implementation externalDependency.commonsLang + api externalDependency.dropwizardMetricsCore + implementation externalDependency.dropwizardMetricsJmx + api externalDependency.elasticSearchRest + implementation externalDependency.httpClient + api externalDependency.neo4jJavaDriver + api externalDependency.json + + implementation spec.product.pegasus.restliClient + implementation spec.product.pegasus.restliCommon + implementation spec.product.pegasus.restliServer + + api project(':li-utils') + api project(':entity-registry') + api project(':metadata-events:mxe-avro-1.7') + api project(':metadata-events:mxe-utils-avro-1.7') implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testCompile project(':test-models') + testImplementation project(':test-models') + testImplementation project(path: ':test-models', configuration: 'testDataTemplate') constraints { implementation(externalDependency.log4jCore) { diff --git a/test-models/build.gradle b/test-models/build.gradle index 4cfbcc1399e7d..c74f7249fa1d9 100644 --- a/test-models/build.gradle +++ b/test-models/build.gradle @@ -1,5 +1,5 @@ apply plugin: 'pegasus' -apply plugin: 'java' +apply plugin: 'java-library' tasks.withType(JavaCompile).configureEach { javaCompiler = javaToolchains.compilerFor { @@ -13,8 +13,8 @@ tasks.withType(Test).configureEach { } dependencies { - compile spec.product.pegasus.data - compile externalDependency.commonsIo + implementation spec.product.pegasus.data + implementation externalDependency.commonsIo dataModel project(':metadata-models') dataModel project(':li-utils') } From 23f24f4e941488c675f26e4fd44662c588c2caee Mon Sep 17 00:00:00 2001 From: Joshua Eilers Date: Fri, 1 Sep 2023 09:53:42 -0700 Subject: [PATCH 35/41] fix(siblings): space icons out (#8767) --- .../profile/header/PlatformContent/PlatformContentView.tsx | 1 + .../src/app/search/autoComplete/AutoCompleteEntity.tsx | 1 + 2 files changed, 2 insertions(+) diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx index 5605bacc1d4e4..51a422ba93418 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/PlatformContent/PlatformContentView.tsx @@ -14,6 +14,7 @@ import ParentNodesView, { const LogoIcon = styled.span` display: flex; + gap: 4px; margin-right: 8px; `; diff --git a/datahub-web-react/src/app/search/autoComplete/AutoCompleteEntity.tsx b/datahub-web-react/src/app/search/autoComplete/AutoCompleteEntity.tsx index 60bb21713ba58..d241a3895f19f 100644 --- a/datahub-web-react/src/app/search/autoComplete/AutoCompleteEntity.tsx +++ b/datahub-web-react/src/app/search/autoComplete/AutoCompleteEntity.tsx @@ -20,6 +20,7 @@ const AutoCompleteEntityWrapper = styled.div` const IconsContainer = styled.div` display: flex; + gap: 4px; `; const ContentWrapper = styled.div` From 59b59c2b9310091be854151ff60250e5399399b2 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Sat, 2 Sep 2023 16:01:04 -0700 Subject: [PATCH 36/41] chore(build): upgrade gradle 
From 59b59c2b9310091be854151ff60250e5399399b2 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Sat, 2 Sep 2023 16:01:04 -0700
Subject: [PATCH 36/41] chore(build): upgrade gradle wrapper (#8776)

---
 gradle/wrapper/gradle-wrapper.jar        | Bin 59203 -> 61624 bytes
 gradle/wrapper/gradle-wrapper.properties |  1 +
 gradlew                                  | 18 ++++++++++++++----
 gradlew.bat                              | 15 +++++++++------
 4 files changed, 24 insertions(+), 10 deletions(-)

diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index e708b1c023ec8b20f512888fe07c5bd3ff77bb8f..afba109285af78dbd2a1d187e33ac4f87c76e392 100644
GIT binary patch
delta 39359
[base85-encoded binary delta omitted]
delta 36880
[base85-encoded binary delta omitted]

diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 98debb84d51de..4e86b9270786f 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.2-bin.zip
+networkTimeout=10000
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
index 1b6c787337ffb..65dcd68d65c82 100755
--- a/gradlew
+++ b/gradlew
@@ -55,7 +55,7 @@
 #       Darwin, MinGW, and NonStop.
 #
 #   (3) This script is generated from the Groovy template
-#       https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+#       https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
 #       within the Gradle project.
 #
 #       You can find Gradle at https://github.com/gradle/gradle/.
@@ -80,10 +80,10 @@ do
     esac
 done

-APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
-
-APP_NAME="Gradle"
+# This is normally unused
+# shellcheck disable=SC2034
 APP_BASE_NAME=${0##*/}
+APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit

 # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
 DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
@@ -143,12 +143,16 @@ fi
 if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
     case $MAX_FD in #(
       max*)
+        # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC3045
         MAX_FD=$( ulimit -H -n ) ||
             warn "Could not query maximum file descriptor limit"
     esac
     case $MAX_FD in  #(
       '' | soft) :;; #(
       *)
+        # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC3045
         ulimit -n "$MAX_FD" ||
             warn "Could not set maximum file descriptor limit to $MAX_FD"
     esac
@@ -205,6 +209,12 @@ set -- \
         org.gradle.wrapper.GradleWrapperMain \
         "$@"

+# Stop when "xargs" is not available.
+if ! command -v xargs >/dev/null 2>&1
+then
+    die "xargs is not available"
+fi
+
 # Use "xargs" to parse quoted args.
 #
 # With -n1 it outputs one arg per line, with the quotes and backslashes removed.
diff --git a/gradlew.bat b/gradlew.bat index ac1b06f93825d..6689b85beecde 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -14,7 +14,7 @@ @rem limitations under the License. @rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +25,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal From 1b79142d9ea6563fa1723d1beb4e341d1fc898a3 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Sat, 2 Sep 2023 19:25:44 -0500 Subject: [PATCH 37/41] feat(EntityService): batched transactions and ebean updates (#8456) --- build.gradle | 6 +- datahub-frontend/build.gradle | 2 +- .../linkedin/datahub/graphql/TestUtils.java | 46 +- .../DeleteAssertionResolverTest.java | 10 +- .../BatchUpdateSoftDeletedResolverTest.java | 25 +- .../BatchUpdateDeprecationResolverTest.java | 26 +- .../UpdateDeprecationResolverTest.java | 2 +- .../domain/BatchSetDomainResolverTest.java | 36 +- .../domain/CreateDomainResolverTest.java | 6 +- .../domain/SetDomainResolverTest.java | 10 +- .../domain/UnsetDomainResolverTest.java | 8 +- .../embed/UpdateEmbedResolverTest.java | 27 +- .../glossary/AddRelatedTermsResolverTest.java | 6 +- .../CreateGlossaryNodeResolverTest.java | 7 +- .../CreateGlossaryTermResolverTest.java | 9 +- .../DeleteGlossaryEntityResolverTest.java | 5 +- .../RemoveRelatedTermsResolverTest.java | 12 +- .../glossary/UpdateNameResolverTest.java | 14 +- .../UpdateParentNodeResolverTest.java | 14 +- .../mutate/UpdateUserSettingResolverTest.java | 4 +- .../owner/AddOwnersResolverTest.java | 15 +- .../owner/BatchAddOwnersResolverTest.java | 19 +- .../owner/BatchRemoveOwnersResolverTest.java | 17 +- .../resolvers/tag/AddTagsResolverTest.java | 15 +- .../tag/BatchAddTagsResolverTest.java | 33 +- .../tag/BatchRemoveTagsResolverTest.java | 34 +- .../resolvers/tag/CreateTagResolverTest.java | 6 +- .../tag/SetTagColorResolverTest.java | 8 +- .../resolvers/term/AddTermsResolverTest.java | 26 +- .../term/BatchAddTermsResolverTest.java | 19 +- .../term/BatchRemoveTermsResolverTest.java | 17 +- .../test/resources/test-entity-registry.yaml | 295 ++++ datahub-upgrade/build.gradle | 2 +- .../upgrade/config/NoCodeCleanupConfig.java | 4 +- .../upgrade/config/NoCodeUpgradeConfig.java | 4 +- .../upgrade/config/RestoreBackupConfig.java | 4 +- .../upgrade/config/RestoreIndicesConfig.java | 4 +- .../upgrade/nocode/CreateAspectTableStep.java | 6 +- 
.../upgrade/nocode/DataMigrationStep.java | 20 +- .../datahub/upgrade/nocode/NoCodeUpgrade.java | 8 +- .../nocode/RemoveAspectV2TableStep.java | 8 +- .../nocode/UpgradeQualificationStep.java | 8 +- .../nocodecleanup/DeleteAspectTableStep.java | 8 +- .../nocodecleanup/NoCodeCleanupUpgrade.java | 8 +- .../NoCodeUpgradeQualificationStep.java | 6 +- .../restorebackup/ClearAspectV2TableStep.java | 6 +- .../upgrade/restorebackup/RestoreBackup.java | 6 +- .../restorebackup/RestoreStorageStep.java | 5 +- .../restoreindices/RestoreIndices.java | 6 +- .../upgrade/restoreindices/SendMAEStep.java | 6 +- ...pgradeCliApplicationTestConfiguration.java | 4 +- docs/deploy/environment-vars.md | 2 +- .../dao/producer/KafkaEventProducer.java | 53 +- metadata-io/build.gradle | 2 + .../metadata/client/JavaEntityClient.java | 16 +- .../linkedin/metadata/entity/AspectDao.java | 83 +- .../metadata/entity/EntityServiceImpl.java | 1275 +++++++---------- .../linkedin/metadata/entity/EntityUtils.java | 44 +- .../entity/cassandra/CassandraAspectDao.java | 101 +- .../cassandra/CassandraRetentionService.java | 45 +- .../ebean/AspectStorageValidationUtil.java | 18 +- .../metadata/entity/ebean/EbeanAspectDao.java | 254 ++-- .../entity/ebean/EbeanRetentionService.java | 147 +- .../ebean/transactions/AspectsBatchImpl.java | 67 + .../ebean/transactions/PatchBatchItem.java | 188 +++ .../ebean/transactions/UpsertBatchItem.java | 173 +++ .../entity/validation/ValidationUtils.java | 38 + .../metadata/event/EventProducer.java | 25 - .../metadata/AspectIngestionUtils.java | 31 +- .../linkedin/metadata/AspectUtilsTest.java | 4 +- .../com/linkedin/metadata/ESTestUtils.java | 5 +- .../com/linkedin/metadata/EbeanTestUtils.java | 10 +- .../entity/EbeanAspectMigrationsDaoTest.java | 4 +- .../entity/EbeanEntityServiceTest.java | 249 +++- .../metadata/entity/EntityServiceTest.java | 363 +++-- .../timeline/EbeanTimelineServiceTest.java | 4 +- .../java/io/datahub/test/DataGenerator.java | 359 +++++ metadata-jobs/mae-consumer-job/build.gradle | 2 +- ...eConsumerApplicationTestConfiguration.java | 4 +- metadata-jobs/mce-consumer-job/build.gradle | 2 +- ...eConsumerApplicationTestConfiguration.java | 4 +- .../token/StatefulTokenService.java | 13 +- .../DataHubTokenAuthenticatorTest.java | 1 + .../token/StatefulTokenServiceTest.java | 6 + .../test/resources/test-entity-registry.yaml | 10 + .../factory/entity/EbeanServerFactory.java | 6 +- .../entity/EntityAspectDaoFactory.java | 4 +- .../EntityAspectMigrationsDaoFactory.java | 4 +- .../entity/RetentionServiceFactory.java | 4 +- .../boot/steps/IndexDataPlatformsStep.java | 18 +- .../IngestDataPlatformInstancesStep.java | 26 +- .../boot/steps/IngestDataPlatformsStep.java | 62 +- .../boot/steps/IngestOwnershipTypesStep.java | 9 +- .../boot/steps/IngestPoliciesStep.java | 13 +- .../metadata/boot/steps/IngestRolesStep.java | 13 +- .../boot/steps/IngestRootUserStep.java | 9 +- .../steps/RestoreColumnLineageIndices.java | 30 +- .../boot/steps/RestoreDbtSiblingsIndices.java | 17 +- .../boot/steps/RestoreGlossaryIndices.java | 31 +- .../IngestDataPlatformInstancesStepTest.java | 25 +- .../RestoreColumnLineageIndicesTest.java | 26 +- .../steps/RestoreGlossaryIndicesTest.java | 25 +- .../test/resources/test-entity-registry.yaml | 6 + .../openapi/util/MappingUtil.java | 20 +- .../java/entities/EntitiesControllerTest.java | 10 + .../src/test/java/mock/MockEntityService.java | 22 +- .../resources/entity/AspectResource.java | 42 +- .../resources/entity/AspectResourceTest.java | 36 +- 
.../linkedin/metadata/entity/AspectUtils.java | 51 +- .../metadata/entity/DeleteEntityService.java | 4 +- .../metadata/entity/EntityService.java | 113 +- .../metadata/entity/IngestResult.java | 18 + .../metadata/entity/RetentionService.java | 88 +- .../metadata/entity/UpdateAspectResult.java | 8 + .../transactions/AbstractBatchItem.java | 92 ++ .../entity/transactions/AspectsBatch.java | 22 + metadata-service/war/build.gradle | 2 +- .../src/main/java/mock/MockEntitySpec.java | 2 +- settings.gradle | 1 - smoke-test/run-quickstart.sh | 2 +- smoke-test/tests/tags-and-terms/data.json | 39 + 121 files changed, 3582 insertions(+), 1822 deletions(-) create mode 100644 datahub-graphql-core/src/test/resources/test-entity-registry.yaml create mode 100644 metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java create mode 100644 metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java create mode 100644 metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java create mode 100644 metadata-io/src/test/java/io/datahub/test/DataGenerator.java create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java diff --git a/build.gradle b/build.gradle index f73fe42d45956..e12d520e12de6 100644 --- a/build.gradle +++ b/build.gradle @@ -18,6 +18,7 @@ buildscript { ext.hadoop3Version = '3.3.5' ext.kafkaVersion = '2.3.0' ext.hazelcastVersion = '5.3.1' + ext.ebeanVersion = '12.16.1' ext.docker_registry = 'linkedin' @@ -86,8 +87,9 @@ project.ext.externalDependency = [ 'dgraph4j' : 'io.dgraph:dgraph4j:21.03.1', 'dropwizardMetricsCore': 'io.dropwizard.metrics:metrics-core:4.2.3', 'dropwizardMetricsJmx': 'io.dropwizard.metrics:metrics-jmx:4.2.3', - 'ebean': 'io.ebean:ebean:11.33.3', - 'ebeanAgent': 'io.ebean:ebean-agent:11.27.1', + 'ebean': 'io.ebean:ebean:' + ebeanVersion, + 'ebeanAgent': 'io.ebean:ebean-agent:' + ebeanVersion, + 'ebeanDdl': 'io.ebean:ebean-ddl-generator:' + ebeanVersion, 'elasticSearchRest': 'org.elasticsearch.client:elasticsearch-rest-high-level-client:' + elasticsearchVersion, 'elasticSearchTransport': 'org.elasticsearch.client:transport:' + elasticsearchVersion, 'findbugsAnnotations': 'com.google.code.findbugs:annotations:3.0.1', diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index cf1f8ca3cdd84..fdf13bac0accc 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -96,7 +96,7 @@ task unversionZip(type: Copy, dependsOn: [':datahub-web-react:build', dist]) { into "${buildDir}/docker/" rename "datahub-frontend-${version}.zip", "datahub-frontend.zip" } -tasks.getByName("docker").dependsOn(unversionZip) +tasks.getByPath(":datahub-frontend:docker").dependsOn(unversionZip) task cleanLocalDockerImages { doLast { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index 0718cc5b01d7e..272a93fa1989c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -9,12 +9,24 @@ import com.linkedin.common.AuditStamp; import 
com.linkedin.common.urn.UrnUtils;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
+import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
+import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.mxe.MetadataChangeProposal;
 import org.mockito.Mockito;
 
+import java.util.List;
+
 
 public class TestUtils {
 
+  public static EntityService getMockEntityService() {
+    EntityRegistry registry = new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml"));
+    EntityService mockEntityService = Mockito.mock(EntityService.class);
+    Mockito.when(mockEntityService.getEntityRegistry()).thenReturn(registry);
+    return mockEntityService;
+  }
+
   public static QueryContext getMockAllowContext() {
     return getMockAllowContext("urn:li:corpuser:test");
   }
@@ -88,25 +100,47 @@ public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequ
   }
 
   public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) {
+    verifyIngestProposal(mockService, numberOfInvocations, List.of(proposal));
+  }
+
+  public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, List<MetadataChangeProposal> proposals) {
+    AspectsBatchImpl batch = AspectsBatchImpl.builder()
+        .mcps(proposals, mockService.getEntityRegistry())
+        .build();
+    Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
+        Mockito.eq(batch),
+        Mockito.any(AuditStamp.class),
+        Mockito.eq(false)
+    );
+  }
+
+  public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) {
     Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
+            Mockito.eq(proposal),
+            Mockito.any(AuditStamp.class),
+            Mockito.eq(false)
     );
   }
 
   public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) {
     Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
+        Mockito.any(AspectsBatchImpl.class),
         Mockito.any(AuditStamp.class),
         Mockito.eq(false)
     );
   }
 
+  public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations) {
+    Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal(
+        Mockito.any(MetadataChangeProposal.class),
+        Mockito.any(AuditStamp.class),
+        Mockito.eq(false)
+    );
+  }
+
   public static void verifyNoIngestProposal(EntityService mockService) {
     Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+        Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean());
   }
 
   private TestUtils() { }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
index 42d2b864309ed..8afec0a889577 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
@@ -31,7 +31,7 @@ public class DeleteAssertionResolverTest {
   public void testGetSuccess() throws Exception {
     EntityClient
mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); Mockito.when(mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), @@ -78,7 +78,7 @@ public void testGetSuccess() throws Exception { public void testGetSuccessNoAssertionInfoFound() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); Mockito.when(mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), @@ -117,7 +117,7 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(false); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); @@ -151,7 +151,7 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); Mockito.when(mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), @@ -189,7 +189,7 @@ public void testGetEntityClientException() throws Exception { Mockito.any(), Mockito.any(Authentication.class)); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index 7e549f201c2c7..bae6f27a854bc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -11,8 +11,11 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; + +import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -29,7 +32,7 @@ public class BatchUpdateSoftDeletedResolverTest { @Test public void testGetSuccessNoExistingStatus() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); 
Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -61,20 +64,17 @@ public void testGetSuccessNoExistingStatus() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetSuccessExistingStatus() throws Exception { final Status originalStatus = new Status().setRemoved(true); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -105,18 +105,15 @@ public void testGetSuccessExistingStatus() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -148,7 +145,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); @@ -166,10 +163,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index 634fd59a857a6..ce5a02bb573e1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -12,8 +12,11 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import 
com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; + +import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -30,7 +33,7 @@ public class BatchUpdateDeprecationResolverTest { @Test public void testGetSuccessNoExistingDeprecation() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -68,12 +71,10 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); - verifyIngestProposal(mockService, 1, proposal2); + + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test @@ -83,7 +84,7 @@ public void testGetSuccessExistingDeprecation() throws Exception { .setNote("") .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -120,18 +121,15 @@ public void testGetSuccessExistingDeprecation() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -164,7 +162,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); @@ -183,10 +181,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java index cf11ecf865085..5d30ae08d6dea 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java @@ -58,7 +58,7 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { .setUrn(Urn.createFromString(TEST_ENTITY_URN)) .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java index 9241661ccfc87..8cd3c71a21555 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java @@ -14,9 +14,12 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; + +import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -35,7 +38,7 @@ public class BatchSetDomainResolverTest { @Test public void testGetSuccessNoExistingDomains() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -74,13 +77,10 @@ public void testGetSuccessNoExistingDomains() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) @@ -92,7 +92,7 @@ public void testGetSuccessExistingDomains() throws Exception { final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( Urn.createFromString(TEST_DOMAIN_1_URN)))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -135,13 +135,10 @@ public void testGetSuccessExistingDomains() throws Exception { proposal1.setAspectName(Constants.DOMAINS_ASPECT_NAME); 
proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) @@ -153,7 +150,7 @@ public void testGetSuccessUnsetDomains() throws Exception { final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( Urn.createFromString(TEST_DOMAIN_1_URN)))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -189,18 +186,15 @@ public void testGetSuccessUnsetDomains() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); - - verifyIngestProposal(mockService, 1, proposal1); - - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), + final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureDomainDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -228,7 +222,7 @@ public void testGetFailureDomainDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -262,7 +256,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); @@ -281,10 +275,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java index 9343e5d772826..8c19f1dc3eb34 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java @@ -40,7 +40,7 @@ public class CreateDomainResolverTest { public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver @@ -76,7 +76,7 @@ public void testGetSuccess() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver @@ -95,7 +95,7 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java index 73d1f699dfd80..92fb26288aa1d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java @@ -54,7 +54,7 @@ public void testGetSuccessNoExistingDomains() throws Exception { .setUrn(Urn.createFromString(TEST_ENTITY_URN)) .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); @@ -110,7 +110,7 @@ public void testGetSuccessExistingDomains() throws Exception { new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) ))))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); @@ -160,7 +160,7 @@ public void testGetFailureDomainDoesNotExist() throws Exception { .setUrn(Urn.createFromString(TEST_ENTITY_URN)) .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(false); @@ -196,7 +196,7 @@ public void 
testGetFailureEntityDoesNotExist() throws Exception { .setUrn(Urn.createFromString(TEST_ENTITY_URN)) .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); @@ -219,7 +219,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index 18b2b9a2747e2..decda39943dde 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -53,7 +53,7 @@ public void testGetSuccessNoExistingDomains() throws Exception { .setUrn(Urn.createFromString(TEST_ENTITY_URN)) .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -104,7 +104,7 @@ public void testGetSuccessExistingDomains() throws Exception { new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) ))))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -148,7 +148,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setUrn(Urn.createFromString(TEST_ENTITY_URN)) .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -169,7 +169,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index 17f7a1968fdf6..f1d44fcb47255 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -15,6 +15,7 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -41,7 +42,7 @@ public class UpdateEmbedResolverTest { @Test public void testGetSuccessNoExistingEmbed() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), @@ -64,11 +65,7 @@ public void testGetSuccessNoExistingEmbed() throws Exception { final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + verifySingleIngestProposal(mockService, 1, proposal);; Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) @@ -80,7 +77,7 @@ public void testGetSuccessExistingEmbed() throws Exception { Embed originalEmbed = new Embed().setRenderUrl("https://otherurl.com"); // Create resolver - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), @@ -103,11 +100,7 @@ public void testGetSuccessExistingEmbed() throws Exception { final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + verifySingleIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) @@ -130,7 +123,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setUrn(Urn.createFromString(TEST_ENTITY_URN)) .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -144,7 +137,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false) );; @@ -153,7 +146,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { // Create resolver - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); UpdateEmbedResolver resolver = new 
UpdateEmbedResolver(mockService); // Execute resolver @@ -164,7 +157,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false) ); @@ -173,7 +166,7 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( Mockito.any(), Mockito.any(Authentication.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 6bbf4f4797560..26c13186c4a81 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -27,7 +27,7 @@ public class AddRelatedTermsResolverTest { private static final String DATASET_URN = "urn:li:dataset:(test,test,test)"; private EntityService setUpService() { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), @@ -56,7 +56,7 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1); + verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); @@ -88,7 +88,7 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1); + verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java index 392ddf6ac4c74..3b47514d87181 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java @@ -15,6 +15,7 @@ import org.testng.annotations.Test; import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; import static com.linkedin.metadata.Constants.*; @@ -69,7 +70,7 @@ private MetadataChangeProposal setupTest( @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService 
mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); @@ -86,7 +87,7 @@ public void testGetSuccess() throws Exception { @Test public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); @@ -103,7 +104,7 @@ public void testGetSuccessNoDescription() throws Exception { @Test public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index e4f32133b4b51..2dbe637d16057 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -29,6 +29,7 @@ import java.util.concurrent.CompletionException; import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; import static org.testng.Assert.assertThrows; import static com.linkedin.metadata.Constants.*; @@ -86,7 +87,7 @@ private MetadataChangeProposal setupTest( @Test public void testGetSuccess() throws Exception { EntityClient mockClient = initMockClient(); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); @@ -103,7 +104,7 @@ public void testGetSuccess() throws Exception { @Test public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = initMockClient(); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); @@ -120,7 +121,7 @@ public void testGetSuccessNoDescription() throws Exception { @Test public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = initMockClient(); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, 
TEST_INPUT_NO_PARENT_NODE, "test-description", null); @@ -166,7 +167,7 @@ public void testGetFailureExistingTermSameName() throws Exception { ) ).thenReturn(result); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); CreateGlossaryEntityInput input = new CreateGlossaryEntityInput( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java index df24c23e89ae6..94f0d0b7a1143 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -13,6 +13,7 @@ import java.util.concurrent.CompletionException; import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -23,7 +24,7 @@ public class DeleteGlossaryEntityResolverTest { @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); @@ -48,7 +49,7 @@ public void testGetEntityClientException() throws Exception { Mockito.any(), Mockito.any(Authentication.class)); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index dd54d7f9835c1..3906d1188cb17 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -34,7 +34,7 @@ public void testGetSuccessIsA() throws Exception { GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), @@ -54,7 +54,7 @@ public void testGetSuccessIsA() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1); + verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( 
Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); @@ -66,7 +66,7 @@ public void testGetSuccessHasA() throws Exception { GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), @@ -86,7 +86,7 @@ public void testGetSuccessHasA() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1); + verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); @@ -94,7 +94,7 @@ public void testGetSuccessHasA() throws Exception { @Test public void testFailAspectDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), @@ -123,7 +123,7 @@ public void testFailNoPermissions() throws Exception { GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index 36f909bd7ebe5..064e2dd3bd59b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -58,7 +58,7 @@ private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, Entit @Test public void testGetSuccess() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -68,12 +68,12 @@ public void testGetSuccess() throws Exception { final MetadataChangeProposal proposal = setupTests(mockEnv, mockService); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1, proposal); + verifySingleIngestProposal(mockService, 1, proposal); } @Test public void testGetSuccessForNode() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); 
Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -98,12 +98,12 @@ public void testGetSuccessForNode() throws Exception { UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1, proposal); + verifySingleIngestProposal(mockService, 1, proposal); } @Test public void testGetSuccessForDomain() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(DOMAIN_URN))).thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -129,12 +129,12 @@ public void testGetSuccessForDomain() throws Exception { UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1, proposal); + verifySingleIngestProposal(mockService, 1, proposal); } @Test public void testGetFailureEntityDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index 43c6113d194a5..a78c28890fecf 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -58,7 +58,7 @@ private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, Entit @Test public void testGetSuccess() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); @@ -69,12 +69,12 @@ public void testGetSuccess() throws Exception { final MetadataChangeProposal proposal = setupTests(mockEnv, mockService); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1, proposal); + verifySingleIngestProposal(mockService, 1, proposal); } @Test public void testGetSuccessForNode() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); @@ -102,12 +102,12 @@ public void testGetSuccessForNode() throws Exception { UpdateParentNodeResolver resolver = new UpdateParentNodeResolver(mockService, mockClient); 
assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 1, proposal); + verifySingleIngestProposal(mockService, 1, proposal); } @Test public void testGetFailureEntityDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); @@ -123,7 +123,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { @Test public void testGetFailureNodeDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(false); @@ -139,7 +139,7 @@ public void testGetFailureNodeDoesNotExist() throws Exception { @Test public void testGetFailureParentIsNotNode() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index c7f1e16a0ea61..9bd44e9ab0906 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -21,7 +21,7 @@ public class UpdateUserSettingResolverTest { private static final String TEST_USER_URN = "urn:li:corpuser:test"; @Test public void testWriteCorpUserSettings() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_USER_URN))).thenReturn(true); UpdateUserSettingResolver resolver = new UpdateUserSettingResolver(mockService); @@ -40,6 +40,6 @@ public void testWriteCorpUserSettings() throws Exception { final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_USER_URN), CORP_USER_SETTINGS_ASPECT_NAME, newSettings); - verifyIngestProposal(mockService, 1, proposal); + verifySingleIngestProposal(mockService, 1, proposal); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 9bdb3c1db2b0a..efc0c5dfcf36d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -13,6 +13,7 @@ import 
com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -30,7 +31,7 @@ public class AddOwnersResolverTest { @Test public void testGetSuccessNoExistingOwners() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -75,7 +76,7 @@ public void testGetSuccessNoExistingOwners() throws Exception { @Test public void testGetSuccessExistingOwners() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -120,7 +121,7 @@ public void testGetSuccessExistingOwners() throws Exception { @Test public void testGetFailureOwnerDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -148,7 +149,7 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -176,7 +177,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -195,10 +196,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index e38eb9eb677c2..79fc62742f444 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -17,6 +17,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -35,7 +36,7 @@ public class 
BatchAddOwnersResolverTest { @Test public void testGetSuccessNoExistingOwners() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -82,7 +83,7 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) @@ -98,7 +99,7 @@ public void testGetSuccessExistingOwners() throws Exception { final Ownership originalOwnership = new Ownership().setOwners(new OwnerArray(ImmutableList.of( new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) ))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -150,7 +151,7 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) @@ -163,7 +164,7 @@ public void testGetSuccessExistingOwners() throws Exception { @Test public void testGetFailureOwnerDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -202,7 +203,7 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -247,7 +248,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -277,10 +278,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index 0884d442ea531..9dc2ec8127806 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -14,6 +14,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveOwnersResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -32,7 +33,7 @@ public class BatchRemoveOwnersResolverTest { @Test public void testGetSuccessNoExistingOwners() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -66,12 +67,12 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); } @Test public void testGetSuccessExistingOwners() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); final Ownership oldOwners1 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) @@ -112,12 +113,12 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -152,7 +153,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -172,10 +173,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 06d9df3278847..268d6a6bc4268 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -13,6 +13,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.AddTagsResolver; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import 
com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; @@ -32,7 +33,7 @@ public class AddTagsResolverTest { @Test public void testGetSuccessNoExistingTags() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -82,7 +83,7 @@ public void testGetSuccessExistingTags() throws Exception { new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) ); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -128,7 +129,7 @@ public void testGetSuccessExistingTags() throws Exception { @Test public void testGetFailureTagDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -156,7 +157,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -184,7 +185,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -203,10 +204,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index d47e6164fe221..651b89359c83f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -15,8 +15,11 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; + +import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -35,7 +38,7 @@ public class BatchAddTagsResolverTest { @Test 
public void testGetSuccessNoExistingTags() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -78,13 +81,10 @@ public void testGetSuccessNoExistingTags() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) @@ -101,7 +101,7 @@ public void testGetSuccessExistingTags() throws Exception { new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) ); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -143,13 +143,10 @@ public void testGetSuccessExistingTags() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); - - verifyIngestProposal(mockService, 1, proposal1); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) @@ -162,7 +159,7 @@ public void testGetSuccessExistingTags() throws Exception { @Test public void testGetFailureTagDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -188,13 +185,13 @@ public void testGetFailureTagDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -227,13 +224,13 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchAddTagsResolver resolver = new 
BatchAddTagsResolver(mockService); @@ -251,16 +248,16 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index 44160cfbe1273..f302540eba904 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -16,10 +16,12 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.Collections; +import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -38,7 +40,7 @@ public class BatchRemoveTagsResolverTest { @Test public void testGetSuccessNoExistingTags() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -76,12 +78,6 @@ public void testGetSuccessNoExistingTags() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); - - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -90,12 +86,12 @@ public void testGetSuccessNoExistingTags() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(emptyTags)); proposal2.setChangeType(ChangeType.UPSERT); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetSuccessExistingTags() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); final GlobalTags oldTags1 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), @@ -143,21 +139,15 @@ public void 
testGetSuccessExistingTags() throws Exception { final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); - - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); - verifyIngestProposal(mockService, 1, proposal2); + verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -190,13 +180,13 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -214,16 +204,16 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java index d294f806d1af7..f801daf4f2a3f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java @@ -31,7 +31,7 @@ public class CreateTagResolverTest { @Test public void testGetSuccess() throws Exception { // Create resolver - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockClient.ingestProposal(Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) .thenReturn(String.format("urn:li:tag:%s", TEST_INPUT.getId())); @@ -64,7 +64,7 @@ public void testGetSuccess() throws Exception { @Test public void testGetUnauthorized() throws Exception { // Create resolver - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); 
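Another pattern repeated above: untyped Mockito.any() matchers on ingestProposal become Mockito.any(AspectsBatchImpl.class). Once EntityService gains a second ingestProposal overload, an untyped any() no longer identifies a unique method, so the matcher must name the parameter type. A minimal, self-contained illustration of the same situation (the Ingestor interface below is hypothetical, not DataHub code):

    import java.util.List;
    import org.mockito.Mockito;

    public class OverloadDemo {
      // Two overloads, as EntityService now has for ingestProposal.
      interface Ingestor {
        void ingest(String single);
        void ingest(List<String> batch);
      }

      void demo() {
        Ingestor svc = Mockito.mock(Ingestor.class);
        // A typed matcher binds the stub to the batch overload...
        Mockito.doThrow(RuntimeException.class).when(svc).ingest(Mockito.anyList());
        // ...and a different typed matcher verifies the single-item overload.
        Mockito.verify(svc, Mockito.times(0)).ingest(Mockito.anyString());
      }
    }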
EntityClient mockClient = Mockito.mock(EntityClient.class); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); @@ -83,7 +83,7 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { // Create resolver - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RuntimeException.class).when(mockClient).ingestProposal( Mockito.any(), diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index da474ca3e0e56..b5bbf0775a8ba 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -37,7 +37,7 @@ public class SetTagColorResolverTest { public void testGetSuccessExistingProperties() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); @@ -78,7 +78,7 @@ public void testGetSuccessExistingProperties() throws Exception { public void testGetFailureNoExistingProperties() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); // Test setting the domain Mockito.when(mockService.getAspect( @@ -127,7 +127,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect))))); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -148,7 +148,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index c9ec92001f89b..213d21fd35dc1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -13,7 +13,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.AddTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.mxe.MetadataChangeProposal; +import 
com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -31,7 +31,7 @@ public class AddTermsResolverTest { @Test public void testGetSuccessNoExistingTerms() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -58,7 +58,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false) ); @@ -77,7 +77,7 @@ public void testGetSuccessExistingTerms() throws Exception { new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)))) ); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -104,7 +104,7 @@ public void testGetSuccessExistingTerms() throws Exception { // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false) ); @@ -119,7 +119,7 @@ public void testGetSuccessExistingTerms() throws Exception { @Test public void testGetFailureTermDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -143,13 +143,13 @@ public void testGetFailureTermDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), @@ -173,13 +173,13 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -194,16 +194,16 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { - EntityService 
mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index dfe1394635c4e..8887bb452b478 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -14,6 +14,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchAddTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -32,7 +33,7 @@ public class BatchAddTermsResolverTest { @Test public void testGetSuccessNoExistingTerms() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -66,7 +67,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) @@ -83,7 +84,7 @@ public void testGetSuccessExistingTerms() throws Exception { new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN)))) ); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -118,7 +119,7 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) @@ -131,7 +132,7 @@ public void testGetSuccessExistingTerms() throws Exception { @Test public void testGetFailureTagDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -160,7 +161,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -196,7 +197,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void 
testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -217,10 +218,10 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index dcc8659c1baf3..995a4acb8a467 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -14,6 +14,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -32,7 +33,7 @@ public class BatchRemoveTermsResolverTest { @Test public void testGetSuccessNoExistingTerms() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -66,12 +67,12 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); } @Test public void testGetSuccessExistingTerms() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); final GlossaryTerms oldTerms1 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), @@ -115,12 +116,12 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - verifyIngestProposal(mockService, 2); + verifyIngestProposal(mockService, 1); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.when(mockService.getAspect( Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), @@ -157,7 +158,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -179,10 +180,10 
@@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(), + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); diff --git a/datahub-graphql-core/src/test/resources/test-entity-registry.yaml b/datahub-graphql-core/src/test/resources/test-entity-registry.yaml new file mode 100644 index 0000000000000..d694ae53ac42f --- /dev/null +++ b/datahub-graphql-core/src/test/resources/test-entity-registry.yaml @@ -0,0 +1,295 @@ +entities: +- name: dataPlatform + category: core + keyAspect: dataPlatformKey + aspects: + - dataPlatformInfo +- name: dataset + doc: Datasets represent logical or physical data assets stored or represented in various data platforms. Tables, Views, and Streams are all instances of datasets. + category: core + keyAspect: datasetKey + aspects: + - viewProperties + - subTypes + - datasetProfile + - datasetUsageStatistics + - operation + - domains + - status + - container + - deprecation + - testResults + - siblings + - embed + - ownership + - glossaryTerms + - globalTags +- name: dataHubPolicy + doc: DataHub Policies represent access policies granted to users or groups on metadata operations like edit, view, etc. + category: internal + keyAspect: dataHubPolicyKey + aspects: + - dataHubPolicyInfo +- name: dataJob + keyAspect: dataJobKey + aspects: + - datahubIngestionRunSummary + - datahubIngestionCheckpoint + - domains + - deprecation + - versionInfo +- name: dataFlow + category: core + keyAspect: dataFlowKey + aspects: + - domains + - deprecation + - versionInfo +- name: dataProcessInstance + doc: DataProcessInstance represents an instance of a datajob/jobflow run + keyAspect: dataProcessInstanceKey + aspects: + - dataProcessInstanceInput + - dataProcessInstanceOutput + - dataProcessInstanceProperties + - dataProcessInstanceRelationships + - dataProcessInstanceRunEvent +- name: chart + keyAspect: chartKey + aspects: + - domains + - container + - deprecation + - chartUsageStatistics + - embed +- name: dashboard + keyAspect: dashboardKey + aspects: + - domains + - container + - deprecation + - dashboardUsageStatistics + - subTypes + - embed +- name: notebook + doc: Notebook represents a combination of query, text, chart, etc. This is in BETA version + keyAspect: notebookKey + aspects: + - notebookInfo + - notebookContent + - editableNotebookProperties + - ownership + - status + - globalTags + - glossaryTerms + - browsePaths + - institutionalMemory + - domains + - subTypes + - dataPlatformInstance +- name: corpuser + doc: CorpUser represents an identity of a person (or an account) in the enterprise. + keyAspect: corpUserKey + aspects: + - corpUserInfo + - corpUserEditableInfo + - corpUserStatus + - groupMembership + - globalTags + - status + - corpUserCredentials + - nativeGroupMembership + - corpUserSettings + - origin + - roleMembership +- name: corpGroup + doc: CorpGroup represents an identity of a group of users in the enterprise. + keyAspect: corpGroupKey + aspects: + - corpGroupInfo + - corpGroupEditableInfo + - globalTags + - ownership + - status + - origin +- name: domain + doc: A data domain within an organization.
+ keyAspect: domainKey + aspects: + - domainProperties + - institutionalMemory + - ownership +- name: container + doc: A container of related data assets. + keyAspect: containerKey + aspects: + - containerProperties + - editableContainerProperties + - dataPlatformInstance + - subTypes + - ownership + - container + - globalTags + - glossaryTerms + - institutionalMemory + - browsePaths # unclear if this will be used + - status + - domains +- name: tag + keyAspect: tagKey + aspects: + - tagProperties + - ownership + - deprecation +- name: glossaryTerm + keyAspect: glossaryTermKey + aspects: + - glossaryTermInfo + - institutionalMemory + - ownership + - deprecation + - domains +- name: glossaryNode + keyAspect: glossaryNodeKey + aspects: + - glossaryNodeInfo + - institutionalMemory + - ownership + - status +- name: dataHubIngestionSource + category: internal + keyAspect: dataHubIngestionSourceKey + aspects: + - dataHubIngestionSourceInfo +- name: dataHubSecret + category: internal + keyAspect: dataHubSecretKey + aspects: + - dataHubSecretValue +- name: dataHubExecutionRequest + category: internal + keyAspect: dataHubExecutionRequestKey + aspects: + - dataHubExecutionRequestInput + - dataHubExecutionRequestSignal + - dataHubExecutionRequestResult +- name: assertion + doc: Assertion represents a data quality rule applied on one or more datasets. + category: core + keyAspect: assertionKey + aspects: + - assertionInfo + - dataPlatformInstance + - assertionRunEvent + - status +- name: dataHubRetention + category: internal + keyAspect: dataHubRetentionKey + aspects: + - dataHubRetentionConfig +- name: dataPlatformInstance + category: internal + keyAspect: dataPlatformInstanceKey + aspects: + - dataPlatformInstanceProperties + - ownership + - globalTags + - institutionalMemory + - deprecation + - status +- name: mlModel + category: core + keyAspect: mlModelKey + aspects: + - glossaryTerms + - editableMlModelProperties + - domains +- name: mlModelGroup + category: core + keyAspect: mlModelGroupKey + aspects: + - glossaryTerms + - editableMlModelGroupProperties + - domains +- name: mlFeatureTable + category: core + keyAspect: mlFeatureTableKey + aspects: + - glossaryTerms + - editableMlFeatureTableProperties + - domains +- name: mlFeature + category: core + keyAspect: mlFeatureKey + aspects: + - glossaryTerms + - editableMlFeatureProperties + - domains +- name: mlPrimaryKey + category: core + keyAspect: mlPrimaryKeyKey + aspects: + - glossaryTerms + - editableMlPrimaryKeyProperties + - domains +- name: telemetry + category: internal + keyAspect: telemetryKey + aspects: + - telemetryClientId +- name: dataHubAccessToken + category: internal + keyAspect: dataHubAccessTokenKey + aspects: + - dataHubAccessTokenInfo +- name: test + doc: A DataHub test + category: core + keyAspect: testKey + aspects: + - testInfo +- name: dataHubUpgrade + category: internal + keyAspect: dataHubUpgradeKey + aspects: + - dataHubUpgradeRequest + - dataHubUpgradeResult +- name: inviteToken + category: core + keyAspect: inviteTokenKey + aspects: + - inviteToken +- name: globalSettings + doc: Global settings for the platform + category: internal + keyAspect: globalSettingsKey + aspects: + - globalSettingsInfo +- name: dataHubRole + category: core + keyAspect: dataHubRoleKey + aspects: + - dataHubRoleInfo +- name: post + category: core + keyAspect: postKey + aspects: + - postInfo +- name: dataHubStepState + category: core + keyAspect: dataHubStepStateKey + aspects: + - dataHubStepStateProperties +- name: dataHubView + category:
core + keyAspect: dataHubViewKey + aspects: + - dataHubViewInfo +- name: ownershipType + doc: Ownership Type represents a user-created ownership category for a person or group who is responsible for an asset. + category: core + keyAspect: ownershipTypeKey + aspects: + - ownershipTypeInfo + - status +events: diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 625cab6b354c5..5d0edf3ee8427 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -100,7 +100,7 @@ docker { load(true) push(false) } -tasks.getByName("docker").dependsOn([bootJar]) +tasks.getByPath(":datahub-upgrade:docker").dependsOn([bootJar]) task cleanLocalDockerImages { doLast { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 23ce409c746d0..0fb8b0eb6e20f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -3,7 +3,7 @@ import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; -import io.ebean.EbeanServer; +import io.ebean.Database; import javax.annotation.Nonnull; import org.elasticsearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; @@ -25,7 +25,7 @@ public class NoCodeCleanupConfig { @DependsOn({"ebeanServer", "graphService", "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN}) @Nonnull public NoCodeCleanupUpgrade createInstance() { - final EbeanServer ebeanServer = applicationContext.getBean(EbeanServer.class); + final Database ebeanServer = applicationContext.getBean(Database.class); final GraphService graphClient = applicationContext.getBean(GraphService.class); final RestHighLevelClient searchClient = applicationContext.getBean(RestHighLevelClient.class); final IndexConvention indexConvention = applicationContext.getBean(IndexConvention.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index 39b3daa73b78f..30175c6fa78c8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -5,7 +5,7 @@ import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; -import io.ebean.EbeanServer; +import io.ebean.Database; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; @@ -24,7 +24,7 @@ public class NoCodeUpgradeConfig { @DependsOn({"ebeanServer", "entityService", "systemAuthentication", "restliEntityClient", "entityRegistry"}) @Nonnull public NoCodeUpgrade createInstance() { - final EbeanServer ebeanServer = applicationContext.getBean(EbeanServer.class); + final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); final Authentication systemAuthentication = applicationContext.getBean(Authentication.class); final 
RestliEntityClient entityClient = applicationContext.getBean(RestliEntityClient.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index ebff7f4b899ad..9b0fcf279abf5 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -7,7 +7,7 @@ import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; -import io.ebean.EbeanServer; +import io.ebean.Database; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; @@ -26,7 +26,7 @@ public class RestoreBackupConfig { "searchService", "entityRegistry"}) @Nonnull public RestoreBackup createInstance() { - final EbeanServer ebeanServer = applicationContext.getBean(EbeanServer.class); + final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); final Authentication systemAuthentication = applicationContext.getBean(Authentication.class); final RestliEntityClient entityClient = applicationContext.getBean(RestliEntityClient.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index ee907005168b8..663cad4a4bff6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -5,7 +5,7 @@ import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; -import io.ebean.EbeanServer; +import io.ebean.Database; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; @@ -23,7 +23,7 @@ public class RestoreIndicesConfig { @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) @Nonnull public RestoreIndices createInstance() { - final EbeanServer ebeanServer = applicationContext.getBean(EbeanServer.class); + final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java index 3b78e95a7b751..7ed7169bf20bc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java @@ -4,7 +4,7 @@ import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; -import 
io.ebean.EbeanServer; +import io.ebean.Database; import java.util.function.Function; public class CreateAspectTableStep implements UpgradeStep { @@ -17,9 +17,9 @@ enum DbType { MARIA } - private final EbeanServer _server; + private final Database _server; - public CreateAspectTableStep(final EbeanServer server) { + public CreateAspectTableStep(final Database server) { _server = server; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index 6553bb80bb1fa..1b5770a11ff62 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -17,10 +17,12 @@ import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; import com.linkedin.metadata.models.EntitySpec; -import io.ebean.EbeanServer; +import com.linkedin.util.Pair; +import io.ebean.Database; import io.ebean.PagedList; import java.net.URISyntaxException; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; @@ -35,13 +37,13 @@ public class DataMigrationStep implements UpgradeStep { private static final String BROWSE_PATHS_ASPECT_NAME = PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); - private final EbeanServer _server; + private final Database _server; private final EntityService _entityService; private final EntityRegistry _entityRegistry; private final Set urnsWithBrowsePath = new HashSet<>(); public DataMigrationStep( - final EbeanServer server, + final Database server, final EntityService entityService, final EntityRegistry entityRegistry) { _server = server; @@ -132,15 +134,11 @@ public Function executable() { // 6. Write the row back using the EntityService boolean emitMae = oldAspect.getKey().getVersion() == 0L; - _entityService.updateAspect( + _entityService.ingestAspects( urn, - entityName, - newAspectName, - aspectSpec, - aspectRecord, + List.of(Pair.of(newAspectName, aspectRecord)), toAuditStamp(oldAspect), - oldAspect.getKey().getVersion(), - emitMae + null ); // 7. If necessary, emit a browse path aspect. 
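This hunk is the core of the write-path migration: the old per-aspect updateAspect call, with its explicit entity name, aspect name, aspect spec, version, and emitMae flag, collapses into ingestAspects, which takes a list of (aspect name, record) pairs plus an optional SystemMetadata (null here). Restating the migrated call shape for reference, exactly as the hunk writes it:

    // After: one batched write per urn; systemMetadata is passed as null.
    _entityService.ingestAspects(
        urn,
        List.of(Pair.of(newAspectName, aspectRecord)),  // List<Pair<String, RecordTemplate>>
        toAuditStamp(oldAspect),
        /* systemMetadata */ null);

Note that the emitMae flag computed just above is no longer passed to the write call, and the browse-path write below is converted to the same ingestAspects shape.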
@@ -154,7 +152,7 @@ public Function executable() { browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); browsePathsStamp.setTime(System.currentTimeMillis()); - _entityService.ingestAspect(urn, BROWSE_PATHS_ASPECT_NAME, browsePaths, browsePathsStamp, null); + _entityService.ingestAspects(urn, List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), browsePathsStamp, null); urnsWithBrowsePath.add(urn); } catch (URISyntaxException e) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index c12ff201faf22..ee4a3bc504e77 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -10,7 +10,7 @@ import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -25,9 +25,9 @@ public class NoCodeUpgrade implements Upgrade { private final List _steps; private final List _cleanupSteps; - // Upgrade requires the EbeanServer. + // Upgrade requires the Database. public NoCodeUpgrade( - final EbeanServer server, + final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final Authentication systemAuthentication, @@ -60,7 +60,7 @@ private List buildCleanupSteps() { } private List buildUpgradeSteps( - final EbeanServer server, + final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final Authentication systemAuthentication, diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java index 440884470463d..cf8e848762f14 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java @@ -4,7 +4,7 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.function.Function; @@ -13,9 +13,9 @@ */ public class RemoveAspectV2TableStep implements UpgradeStep { - private final EbeanServer _server; + private final Database _server; - public RemoveAspectV2TableStep(final EbeanServer server) { + public RemoveAspectV2TableStep(final Database server) { _server = server; } @@ -28,7 +28,7 @@ public String id() { public Function executable() { return (context) -> { context.report().addLine("Cleanup requested. 
Dropping metadata_aspect_v2"); - _server.execute(_server.createSqlUpdate("DROP TABLE IF EXISTS metadata_aspect_v2")); + _server.execute(_server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect_v2")); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java index ec05f210f0132..0fe9afa8cc6f8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java @@ -5,14 +5,14 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.metadata.entity.ebean.AspectStorageValidationUtil; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.function.Function; public class UpgradeQualificationStep implements UpgradeStep { - private final EbeanServer _server; + private final Database _server; - UpgradeQualificationStep(EbeanServer server) { + UpgradeQualificationStep(Database server) { _server = server; } @@ -52,7 +52,7 @@ public Function executable() { } // Check whether the upgrade is needed - private boolean isQualified(EbeanServer server, UpgradeContext context) { + private boolean isQualified(Database server, UpgradeContext context) { boolean v1TableExists = AspectStorageValidationUtil.checkV1TableExists(server); if (v1TableExists) { context.report().addLine("-- V1 table exists"); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java index 2d435cdc28a6b..8005e31e01c67 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java @@ -4,16 +4,16 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.function.Function; // Do we need SQL-tech specific migration paths? 
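// For context: throughout these upgrade steps the Ebean 11 entry point
// io.ebean.EbeanServer becomes io.ebean.Database, and createSqlUpdate(...)
// becomes sqlUpdate(...). A hedged sketch (not part of the patch) of the
// new API; obtaining the instance via DB.getDefault() is an assumption for
// illustration only, the upgrade code receives its Database through Spring.
import io.ebean.DB;
import io.ebean.Database;

class EbeanDatabaseSketch {
  static void dropLegacyTable() {
    Database server = DB.getDefault();
    // sqlUpdate(...) builds the same SqlUpdate the old createSqlUpdate(...) did.
    server.execute(server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect_v2"));
  }
}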
public class DeleteAspectTableStep implements UpgradeStep { - private final EbeanServer _server; + private final Database _server; - public DeleteAspectTableStep(final EbeanServer server) { + public DeleteAspectTableStep(final Database server) { _server = server; } @@ -31,7 +31,7 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _server.execute(_server.createSqlUpdate("DROP TABLE IF EXISTS metadata_aspect;")); + _server.execute(_server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect;")); } catch (Exception e) { context.report().addLine("Failed to delete data from legacy table metadata_aspect", e); return new DefaultUpgradeStepResult( diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index c9a13c2208a56..2b5e23c5f8269 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -5,7 +5,7 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -17,8 +17,8 @@ public class NoCodeCleanupUpgrade implements Upgrade { private final List _steps; private final List _cleanupSteps; - // Upgrade requires the EbeanServer. - public NoCodeCleanupUpgrade(final EbeanServer server, final GraphService graphClient, + // Upgrade requires the Database. + public NoCodeCleanupUpgrade(final Database server, final GraphService graphClient, final RestHighLevelClient searchClient, final IndexConvention indexConvention) { _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); _cleanupSteps = buildCleanupSteps(); @@ -43,7 +43,7 @@ private List buildCleanupSteps() { return Collections.emptyList(); } - private List buildUpgradeSteps(final EbeanServer server, final GraphService graphClient, + private List buildUpgradeSteps(final Database server, final GraphService graphClient, final RestHighLevelClient searchClient, final IndexConvention indexConvention) { final List steps = new ArrayList<>(); steps.add(new NoCodeUpgradeQualificationStep(server)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java index 52e299d68b45a..67a226f8f0676 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java @@ -5,15 +5,15 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.entity.ebean.AspectStorageValidationUtil; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.function.Function; public class NoCodeUpgradeQualificationStep implements UpgradeStep { - private final EbeanServer _server; + private final Database _server; - NoCodeUpgradeQualificationStep(EbeanServer server) { + NoCodeUpgradeQualificationStep(Database server) { _server = 
server; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java index 711cccf742254..0303739e62afe 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java @@ -5,7 +5,7 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.function.Function; @@ -14,9 +14,9 @@ */ public class ClearAspectV2TableStep implements UpgradeStep { - private final EbeanServer _server; + private final Database _server; - public ClearAspectV2TableStep(final EbeanServer server) { + public ClearAspectV2TableStep(final Database server) { _server = server; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index a9dfa948c7873..67718a6739beb 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -14,7 +14,7 @@ import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.ArrayList; import java.util.List; @@ -24,7 +24,7 @@ public class RestoreBackup implements Upgrade { private final List _steps; public RestoreBackup( - final EbeanServer server, + final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final Authentication systemAuthentication, @@ -45,7 +45,7 @@ public List steps() { } private List buildSteps( - final EbeanServer server, + final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final Authentication systemAuthentication, diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index f22a52c4877f6..42f7f0073e59b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -20,6 +20,8 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.util.Pair; + import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.util.ArrayList; @@ -181,8 +183,7 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { final long version = aspect.getKey().getVersion(); final AuditStamp auditStamp = toAuditStamp(aspect); futureList.add(_gmsThreadPool.submit(() -> - _entityService.updateAspect(urn, entityName, aspectName, aspectSpec, aspectRecord, auditStamp, - version, version == 0L))); + _entityService.ingestAspects(urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, 
null).get(0).getNewValue())); if (numRows % REPORT_BATCH_SIZE == 0) { for (Future future : futureList) { try { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 9e11a953079a5..ee6a5ed6f1536 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -10,7 +10,7 @@ import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.ArrayList; import java.util.List; @@ -27,7 +27,7 @@ public class RestoreIndices implements Upgrade { private final List _steps; - public RestoreIndices(final EbeanServer server, final EntityService entityService, + public RestoreIndices(final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, final GraphService graphService) { _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); @@ -43,7 +43,7 @@ public List steps() { return _steps; } - private List buildSteps(final EbeanServer server, final EntityService entityService, + private List buildSteps(final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, final GraphService graphService) { final List steps = new ArrayList<>(); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index ac2457732771d..ce39b3fb562af 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -9,7 +9,7 @@ import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.models.registry.EntityRegistry; -import io.ebean.EbeanServer; +import io.ebean.Database; import io.ebean.ExpressionList; import java.util.ArrayList; @@ -32,7 +32,7 @@ public class SendMAEStep implements UpgradeStep { private static final long DEFAULT_BATCH_DELAY_MS = 250; private static final int DEFAULT_THREADS = 1; - private final EbeanServer _server; + private final Database _server; private final EntityService _entityService; public class KafkaJob implements Callable { @@ -48,7 +48,7 @@ public RestoreIndicesResult call() { } } - public SendMAEStep(final EbeanServer server, final EntityService entityService, final EntityRegistry entityRegistry) { + public SendMAEStep(final Database server, final EntityService entityService, final EntityRegistry entityRegistry) { _server = server; _entityService = entityService; } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index fefc853be8c0b..b1bdead58a72b 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ 
b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java
@@ -6,7 +6,7 @@
 import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.SearchService;
-import io.ebean.EbeanServer;
+import io.ebean.Database;
 import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.context.annotation.Import;
@@ -19,7 +19,7 @@ public class UpgradeCliApplicationTestConfiguration {
   private UpgradeCli upgradeCli;

   @MockBean
-  private EbeanServer ebeanServer;
+  private Database ebeanServer;

   @MockBean
   private EntityService _entityService;
diff --git a/docs/deploy/environment-vars.md b/docs/deploy/environment-vars.md
index af4ae09c009fd..a09db7bc6b82e 100644
--- a/docs/deploy/environment-vars.md
+++ b/docs/deploy/environment-vars.md
@@ -19,7 +19,7 @@ DataHub works.
 | Variable | Default | Unit/Type | Components | Description |
 |------------------------------------|---------|-----------|-------------------------|-------------|
-| `ASYNC_INGESTION_DEFAULT` | `false` | boolean | [`GMS`] | Asynchronously process ingestProposals by writing the ingestion MCP to Kafka. Typically enabled with standalone consumers. |
+| `ASYNC_INGEST_DEFAULT` | `false` | boolean | [`GMS`] | Asynchronously process ingestProposals by writing the ingestion MCP to Kafka. Typically enabled with standalone consumers. |
 | `MCP_CONSUMER_ENABLED` | `true` | boolean | [`GMS`, `MCE Consumer`] | When running in standalone mode, disabled on `GMS` and enabled on separate `MCE Consumer`. |
 | `MCL_CONSUMER_ENABLED` | `true` | boolean | [`GMS`, `MAE Consumer`] | When running in standalone mode, disabled on `GMS` and enabled on separate `MAE Consumer`. |
 | `PE_CONSUMER_ENABLED` | `true` | boolean | [`GMS`, `MAE Consumer`] | When running in standalone mode, disabled on `GMS` and enabled on separate `MAE Consumer`.
| diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java index 65bf250200d13..00b5bb75d901b 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java @@ -1,25 +1,18 @@ package com.linkedin.metadata.dao.producer; import com.datahub.util.exception.ModelConversionException; -import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.snapshot.Snapshot; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; -import com.linkedin.mxe.MetadataAuditEvent; -import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; -import com.linkedin.mxe.SystemMetadata; import com.linkedin.mxe.TopicConvention; import com.linkedin.mxe.TopicConventionImpl; -import com.linkedin.mxe.Topics; import io.opentelemetry.extension.annotations.WithSpan; import java.io.IOException; -import java.util.Arrays; import java.util.concurrent.Future; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -55,45 +48,6 @@ public KafkaEventProducer(@Nonnull final Producer produceMetadataChangeLog(@Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, @@ -120,7 +74,7 @@ record = EventUtils.pegasusToAvroMCL(metadataChangeLog); @Override @WithSpan public Future produceMetadataChangeProposal(@Nonnull final Urn urn, - @Nonnull final MetadataChangeProposal metadataChangeProposal) { + @Nonnull final MetadataChangeProposal metadataChangeProposal) { GenericRecord record; try { @@ -171,9 +125,4 @@ record = EventUtils.pegasusToAvroDUHE(event); _producer.send(new ProducerRecord(topic, event.getVersion(), record), _kafkaHealthChecker .getKafkaCallBack("History Event", "Event Version: " + event.getVersion())); } - - @VisibleForTesting - static boolean isValidAspectSpecificTopic(@Nonnull String topic) { - return Arrays.stream(Topics.class.getFields()).anyMatch(field -> field.getName().equals(topic)); - } } diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index e8ef0b3d6819d..d2b584ceb6745 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -38,6 +38,7 @@ dependencies { api externalDependency.kafkaClients api externalDependency.ebean enhance externalDependency.ebeanAgent + implementation externalDependency.ebeanDdl implementation externalDependency.opentelemetryAnnotations implementation externalDependency.resilience4j api externalDependency.springContext @@ -71,6 +72,7 @@ dependencies { // logback >=1.3 required due to `testcontainers` only testImplementation 'ch.qos.logback:logback-classic:1.4.7' + testImplementation 'net.datafaker:datafaker:1.9.0' testAnnotationProcessor externalDependency.lombok diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 35ed8c7905169..a69c6008fea47 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -25,6 +25,9 @@ import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.IngestResult; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.query.AutoCompleteResult; @@ -60,6 +63,7 @@ import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; @@ -535,8 +539,16 @@ public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChang final List additionalChanges = AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); - Urn urn = _entityService.ingestProposal(metadataChangeProposal, auditStamp, async).getUrn(); - additionalChanges.forEach(proposal -> _entityService.ingestProposal(proposal, auditStamp, async)); + Stream proposalStream = Stream.concat(Stream.of(metadataChangeProposal), + additionalChanges.stream()); + AspectsBatch batch = AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) + .build(); + + IngestResult one = _entityService.ingestProposal(batch, auditStamp, async).stream() + .findFirst().get(); + + Urn urn = one.getUrn(); tryIndexRunId(urn, metadataChangeProposal.getSystemMetadata()); return urn.toString(); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java index 9967df9207ec7..2d5c5e23ae528 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java @@ -3,8 +3,10 @@ import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.utils.metrics.MetricUtils; import io.ebean.PagedList; +import io.ebean.Transaction; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -12,6 +14,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import java.util.function.Supplier; /** @@ -45,37 +48,45 @@ public interface AspectDao { List getAspectsInRange(@Nonnull Urn urn, Set aspectNames, long startTimeMillis, long endTimeMillis); @Nullable - EntityAspect getLatestAspect(@Nonnull final String urn, @Nonnull final String aspectName); + default EntityAspect getLatestAspect(@Nonnull final String urn, @Nonnull final String aspectName) { + return getLatestAspects(Map.of(urn, Set.of(aspectName))).getOrDefault(urn, Map.of()) + .getOrDefault(aspectName, null); + } + + @Nonnull + Map> getLatestAspects(Map> urnAspects); void saveAspect( - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert); + @Nullable Transaction 
tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert); - void saveAspect(@Nonnull final EntityAspect aspect, final boolean insert); + void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert); long saveLatestAspect( - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion); - - void deleteAspect(@Nonnull final EntityAspect aspect); + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion); + + void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect); @Nonnull ListResult listUrns( @@ -92,7 +103,7 @@ Integer countAspect( @Nonnull PagedList getPagedAspects(final RestoreIndicesArgs args); - int deleteUrn(@Nonnull final String urn); + int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn); @Nonnull ListResult listLatestAspectMetadata( @@ -109,16 +120,28 @@ ListResult listAspectMetadata( final int start, final int pageSize); - long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName); + Map> getNextVersions(@Nonnull Map> urnAspectMap); + + default long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) { + return getNextVersions(urn, Set.of(aspectName)).get(aspectName); + } - Map getNextVersions(@Nonnull final String urn, @Nonnull final Set aspectNames); + default Map getNextVersions(@Nonnull final String urn, @Nonnull final Set aspectNames) { + return getNextVersions(Map.of(urn, aspectNames)).get(urn); + } long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName); void setWritable(boolean canWrite); @Nonnull - T runInTransactionWithRetry(@Nonnull final Supplier block, final int maxTransactionRetry); + T runInTransactionWithRetry(@Nonnull final Function block, final int maxTransactionRetry); + + @Nonnull + default T runInTransactionWithRetry(@Nonnull final Function block, AspectsBatch batch, + final int maxTransactionRetry) { + return runInTransactionWithRetry(block, maxTransactionRetry); + } default void incrementWriteMetrics(String aspectName, long count, long bytes) { MetricUtils.counter(this.getClass(), diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index e070944b49a05..03b81cfc703c4 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -4,14 +4,6 @@ import com.linkedin.metadata.config.PreProcessHooks; import com.datahub.util.RecordUtils; import com.datahub.util.exception.ModelConversionException; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.StreamReadConstraints; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.fge.jsonpatch.JsonPatch; -import com.github.fge.jsonpatch.JsonPatchException; -import com.github.fge.jsonpatch.Patch; -import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterators; @@ -25,10 +17,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.common.urn.VersionedUrnUtils; -import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.data.schema.TyperefDataSchema; -import com.linkedin.data.schema.validation.ValidationResult; -import com.linkedin.data.schema.validator.Validator; import com.linkedin.data.template.DataTemplateUtil; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; @@ -45,19 +34,20 @@ import com.linkedin.metadata.aspect.Aspect; import com.linkedin.metadata.aspect.VersionedAspect; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.transactions.AbstractBatchItem; +import com.linkedin.metadata.entity.ebean.transactions.PatchBatchItem; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; -import com.linkedin.metadata.entity.validation.EntityRegistryUrnValidator; -import com.linkedin.metadata.entity.validation.RecordTemplateValidator; -import com.linkedin.metadata.entity.validation.ValidationUtils; +import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.RelationshipFieldSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.run.AspectRowSummary; import com.linkedin.metadata.search.utils.BrowsePathV2Utils; @@ -74,9 +64,8 @@ import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; import io.ebean.PagedList; -import java.io.IOException; + import java.net.URISyntaxException; -import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; import java.util.ArrayList; @@ -84,19 +73,24 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.function.Consumer; import java.util.concurrent.TimeUnit; -import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.persistence.EntityNotFoundException; + +import io.ebean.Transaction; import lombok.extern.slf4j.Slf4j; import static com.linkedin.metadata.Constants.*; @@ -126,9 +120,9 @@ * will have version 4. The "true" latest version of an aspect is always equal to the highest stored version * of a given aspect + 1. * - * Note that currently, implementations of this interface are responsible for producing Metadata Audit Events on - * ingestion using {@link #produceMetadataChangeLog(Urn, String, String, AspectSpec, RecordTemplate, RecordTemplate, - * SystemMetadata, SystemMetadata, AuditStamp, ChangeType)}. + * Note that currently, implementations of this interface are responsible for producing Metadata Change Log on + * ingestion using {@link #conditionallyProduceMCLAsync(RecordTemplate, SystemMetadata, RecordTemplate, SystemMetadata, + * MetadataChangeProposal, Urn, AuditStamp, AspectSpec)}. * * TODO: Consider whether we can abstract away virtual versioning semantics to subclasses of this class. */ @@ -140,13 +134,7 @@ public class EntityServiceImpl implements EntityService { * monotonically increasing version incrementing as usual once the latest version is replaced. */ - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - - private static final int DEFAULT_MAX_TRANSACTION_RETRY = 4; + private static final int DEFAULT_MAX_TRANSACTION_RETRY = 3; protected final AspectDao _aspectDao; private final EventProducer _producer; @@ -158,10 +146,6 @@ public class EntityServiceImpl implements EntityService { private final PreProcessHooks _preProcessHooks; protected static final int MAX_KEYS_PER_QUERY = 500; - private static final int URN_NUM_BYTES_LIMIT = 512; - - // TODO(iprentic): Move this to a common utils location once used in other places - private static final String DELIMITER_SEPARATOR = "␟"; private final Integer ebeanMaxTransactionRetry; @@ -194,8 +178,6 @@ public EntityServiceImpl( ebeanMaxTransactionRetry = retry != null ? retry : DEFAULT_MAX_TRANSACTION_RETRY; } - - /** * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided * set of aspect names. @@ -204,6 +186,7 @@ public EntityServiceImpl( * @param aspectNames aspects to fetch for each urn in urns set * @return a map of provided {@link Urn} to a List containing the requested aspects. */ + @Override public Map> getLatestAspects( @Nonnull final Set urns, @Nonnull final Set aspectNames) { @@ -220,7 +203,7 @@ public Map> getLatestAspects( // Add "key" aspects for each urn. TODO: Replace this with a materialized key aspect. 
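// For context: the AspectDao changes in this patch batch the read path and
// keep the old single-key calls as default methods. The generic type
// parameters were lost in this rendering; the sketch below (not part of the
// patch) reconstructs them, keyed urn -> aspect name, from the
// default-method bodies in the diff, so treat the exact signatures as a
// best-effort reading.
import com.linkedin.metadata.entity.EntityAspect;
import java.util.Map;
import java.util.Set;

interface AspectDaoBatchReads {
  Map<String, Map<String, EntityAspect>> getLatestAspects(Map<String, Set<String>> urnAspects);

  Map<String, Map<String, Long>> getNextVersions(Map<String, Set<String>> urnAspectMap);

  // Old single-key lookup delegating to the batch call, as in the diff.
  default EntityAspect getLatestAspect(String urn, String aspectName) {
    return getLatestAspects(Map.of(urn, Set.of(aspectName)))
        .getOrDefault(urn, Map.of())
        .getOrDefault(aspectName, null);
  }
}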
urnToAspects.keySet().forEach(key -> { - final RecordTemplate keyAspect = buildKeyAspect(key); + final RecordTemplate keyAspect = EntityUtils.buildKeyAspect(_entityRegistry, key); urnToAspects.get(key).add(keyAspect); }); @@ -401,8 +384,7 @@ public Map> getVersionedEnvelopedAspects( .map(UrnUtils::getUrn).collect(Collectors.toSet())); } - private Map> getCorrespondingAspects(Set dbKeys, Set urns) - throws URISyntaxException { + private Map> getCorrespondingAspects(Set dbKeys, Set urns) { final Map envelopedAspectMap = getEnvelopedAspects(dbKeys); @@ -446,34 +428,6 @@ public EnvelopedAspect getLatestEnvelopedAspect( .orElse(null); } - /** - * Retrieves the specific version of the aspect for the given urn - * - * @param entityName name of the entity to fetch - * @param urn urn to fetch - * @param aspectName name of the aspect to fetch - * @param version version to fetch - * @return {@link EnvelopedAspect} object, or null if one cannot be found - */ - @Override - public EnvelopedAspect getEnvelopedAspect( - // TODO: entityName is only used for a debug statement, can we remove this as a param? - String entityName, - @Nonnull Urn urn, - @Nonnull String aspectName, - long version) throws Exception { - log.debug(String.format("Invoked getEnvelopedAspect with entityName: %s, urn: %s, aspectName: %s, version: %s", - urn.getEntityType(), - urn, - aspectName, - version)); - - version = calculateVersionNumber(urn, aspectName, version); - - final EntityAspectIdentifier primaryKey = new EntityAspectIdentifier(urn.toString(), aspectName, version); - return getEnvelopedAspects(ImmutableSet.of(primaryKey)).get(primaryKey); - } - /** * Retrieves an {@link VersionedAspect}, or null if one cannot be found. */ @@ -543,227 +497,192 @@ public ListResult listLatestAspects( aspectMetadataList.getPageSize()); } - - @Nonnull - protected UpdateAspectResult wrappedIngestAspectToLocalDB(@Nonnull final Urn urn, @Nonnull final String aspectName, - @Nonnull final Function, RecordTemplate> updateLambda, - @Nonnull final AuditStamp auditStamp, @Nonnull final SystemMetadata systemMetadata) { - validateUrn(urn); - validateAspect(urn, updateLambda.apply(null)); - return ingestAspectToLocalDB(urn, aspectName, updateLambda, auditStamp, systemMetadata); - } - - @Nonnull - private List> wrappedIngestAspectsToLocalDB(@Nonnull final Urn urn, - @Nonnull List> aspectRecordsToIngest, - @Nonnull final AuditStamp auditStamp, @Nonnull final SystemMetadata providedSystemMetadata) { - validateUrn(urn); - aspectRecordsToIngest.forEach(pair -> validateAspect(urn, pair.getSecond())); - return ingestAspectsToLocalDB(urn, aspectRecordsToIngest, auditStamp, providedSystemMetadata); - } - - // Validates urn subfields using EntityRegistryUrnValidator and does basic field validation for type alignment - // due to validator logic which inherently does coercion - private void validateAspect(Urn urn, RecordTemplate aspect) { - EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(_entityRegistry); - validator.setCurrentEntitySpec(_entityRegistry.getEntitySpec(urn.getEntityType())); - validateAspect(urn, aspect, validator); + /** + * Common batch-like pattern used primarily in tests. 
+ * @param entityUrn the entity urn + * @param pairList list of aspects in pairs of aspect name and record template + * @param auditStamp audit stamp + * @param systemMetadata system metadata + * @return update result + */ + @Override + public List ingestAspects(@Nonnull Urn entityUrn, + List> pairList, + @Nonnull final AuditStamp auditStamp, + SystemMetadata systemMetadata) { + List items = pairList.stream() + .map(pair -> UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(pair.getKey()) + .aspect(pair.getValue()) + .systemMetadata(systemMetadata) + .build(_entityRegistry)) + .collect(Collectors.toList()); + return ingestAspects(AspectsBatchImpl.builder().items(items).build(), auditStamp, true, true); } - private void validateAspect(Urn urn, RecordTemplate aspect, Validator validator) { - Consumer resultFunction = validationResult -> { - throw new IllegalArgumentException("Invalid format for aspect: " + aspect + " for entity: " + urn + "\n Cause: " - + validationResult.getMessages()); }; - RecordTemplateValidator.validate(buildKeyAspect(urn), resultFunction, validator); - RecordTemplateValidator.validate(aspect, resultFunction, validator); - } /** - * Checks whether there is an actual update to the aspect by applying the updateLambda - * If there is an update, push the new version into the local DB. - * Otherwise, do not push the new version, but just update the system metadata. - * DO NOT CALL DIRECTLY, USE WRAPPED METHODS TO VALIDATE URN + * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataChangeLog}. * - * @param urn an urn associated with the new aspect - * @param aspectName name of the aspect being inserted - * @param updateLambda Function to apply to the latest version of the aspect to get the updated version - * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time * @param providedSystemMetadata - * @return Details about the new and old version of the aspect + * @param aspectsBatch aspects to write + * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time + * @param emitMCL whether a {@link com.linkedin.mxe.MetadataChangeLog} should be emitted in correspondence upon + * successful update + * @return the {@link RecordTemplate} representation of the written aspect object */ - @Nonnull - @Deprecated - protected UpdateAspectResult ingestAspectToLocalDB( - @Nonnull final Urn urn, - @Nonnull final String aspectName, - @Nonnull final Function, RecordTemplate> updateLambda, - @Nonnull final AuditStamp auditStamp, - @Nonnull final SystemMetadata providedSystemMetadata) { + @Override + public List ingestAspects(@Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean emitMCL, + boolean overwrite) { - return _aspectDao.runInTransactionWithRetry(() -> { - final String urnStr = urn.toString(); - final EntityAspect latest = _aspectDao.getLatestAspect(urnStr, aspectName); - long nextVersion = _aspectDao.getNextVersion(urnStr, aspectName); + Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time(); + List ingestResults = ingestAspectsToLocalDB(aspectsBatch, auditStamp, overwrite); + List mclResults = emitMCL(ingestResults, emitMCL); + ingestToLocalDBTimer.stop(); - return ingestAspectToLocalDBNoTransaction(urn, aspectName, updateLambda, auditStamp, providedSystemMetadata, latest, nextVersion); - }, DEFAULT_MAX_TRANSACTION_RETRY); + return mclResults; } /** - * Apply patch update to aspect 
within a single transaction + * Checks whether there is an actual update to the aspect by applying the updateLambda + * If there is an update, push the new version into the local DB. + * Otherwise, do not push the new version, but just update the system metadata. * - * @param urn an urn associated with the new aspect - * @param aspectSpec AspectSpec of the aspect to update - * @param jsonPatch JsonPatch to apply to the aspect - * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time * @param providedSystemMetadata + * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of the aspect being + * inserted, and a function to apply to the latest version of the aspect to get the updated version + * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time * @return Details about the new and old version of the aspect */ @Nonnull - @Deprecated - protected UpdateAspectResult patchAspectToLocalDB( - @Nonnull final Urn urn, - @Nonnull final AspectSpec aspectSpec, - @Nonnull final Patch jsonPatch, - @Nonnull final AuditStamp auditStamp, - @Nonnull final SystemMetadata providedSystemMetadata) { - - return _aspectDao.runInTransactionWithRetry(() -> { - final String urnStr = urn.toString(); - final String aspectName = aspectSpec.getName(); - final EntityAspect latest = _aspectDao.getLatestAspect(urnStr, aspectName); - final long nextVersion = _aspectDao.getNextVersion(urnStr, aspectName); - try { + private List ingestAspectsToLocalDB(@Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean overwrite) { - final RecordTemplate currentValue = latest != null - ? EntityUtils.toAspectRecord(urn, aspectName, latest.getMetadata(), _entityRegistry) - : _entityRegistry.getAspectTemplateEngine().getDefaultTemplate(aspectSpec.getName()); - - if (latest == null && currentValue == null) { - // Attempting to patch a value to an aspect which has no default value and no existing value. - throw new UnsupportedOperationException(String.format("Patch not supported for aspect with name %s. 
" - + "Default aspect is required because no aspect currently exists for urn %s.", aspectName, urn)); - } - - final RecordTemplate updatedValue = _entityRegistry.getAspectTemplateEngine().applyPatch(currentValue, jsonPatch, aspectSpec); + if (aspectsBatch.containsDuplicateAspects()) { + log.warn(String.format("Batch contains duplicates: %s", aspectsBatch)); + } - validateAspect(urn, updatedValue); - return ingestAspectToLocalDBNoTransaction(urn, aspectName, ignored -> updatedValue, auditStamp, providedSystemMetadata, - latest, nextVersion); - } catch (JsonProcessingException | JsonPatchException e) { - throw new IllegalStateException(e); + return _aspectDao.runInTransactionWithRetry((tx) -> { + // Read before write is unfortunate, however batch it + Map> urnAspects = aspectsBatch.getUrnAspectsMap(); + // read #1 + Map> latestAspects = _aspectDao.getLatestAspects(urnAspects); + // read #2 + Map> nextVersions = _aspectDao.getNextVersions(urnAspects); + + List items = aspectsBatch.getItems().stream() + .map(item -> { + if (item instanceof UpsertBatchItem) { + return (UpsertBatchItem) item; + } else { + // patch to upsert + PatchBatchItem patchBatchItem = (PatchBatchItem) item; + final String urnStr = patchBatchItem.getUrn().toString(); + final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(patchBatchItem.getAspectName()); + final RecordTemplate currentValue = latest != null + ? EntityUtils.toAspectRecord(patchBatchItem.getUrn(), patchBatchItem.getAspectName(), latest.getMetadata(), _entityRegistry) : null; + return patchBatchItem.applyPatch(_entityRegistry, currentValue); + } + }) + .collect(Collectors.toList()); + + // Database Upsert results + List upsertResults = items.stream() + .map(item -> { + final String urnStr = item.getUrn().toString(); + final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(item.getAspectName()); + final long nextVersion = nextVersions.getOrDefault(urnStr, Map.of()).getOrDefault(item.getAspectName(), 0L); + + final UpdateAspectResult result; + if (overwrite || latest == null) { + result = ingestAspectToLocalDB(tx, item.getUrn(), item.getAspectName(), item.getAspect(), + auditStamp, item.getSystemMetadata(), latest, nextVersion).toBuilder().request(item).build(); + + // support inner-batch upserts + latestAspects.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), item.toLatestEntityAspect(auditStamp)); + nextVersions.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), nextVersion + 1); + } else { + RecordTemplate oldValue = EntityUtils.toAspectRecord(item.getUrn().getEntityType(), item.getAspectName(), + latest.getMetadata(), getEntityRegistry()); + SystemMetadata oldMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + result = UpdateAspectResult.builder() + .urn(item.getUrn()) + .request(item) + .oldValue(oldValue) + .newValue(oldValue) + .oldSystemMetadata(oldMetadata) + .newSystemMetadata(oldMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(latest.getVersion()) + .build(); + } + + return result; + }).collect(Collectors.toList()); + + // commit upserts prior to retention or kafka send, if supported by impl + if (tx != null) { + tx.commitAndContinue(); } - }, DEFAULT_MAX_TRANSACTION_RETRY); - } - /** - * Same as ingestAspectToLocalDB but for multiple aspects - * DO NOT CALL DIRECTLY, USE WRAPPED METHODS TO VALIDATE URN - */ - @Nonnull - @Deprecated - protected List> ingestAspectsToLocalDB( - @Nonnull final Urn 
urn, - @Nonnull List> aspectRecordsToIngest, - @Nonnull final AuditStamp auditStamp, - @Nonnull final SystemMetadata systemMetadata) { - - return _aspectDao.runInTransactionWithRetry(() -> { - - final Set aspectNames = aspectRecordsToIngest - .stream() - .map(Pair::getFirst) - .collect(Collectors.toSet()); - - Map latestAspects = getLatestAspectForUrn(urn, aspectNames); - Map nextVersions = _aspectDao.getNextVersions(urn.toString(), aspectNames); - - List> result = new ArrayList<>(); - for (Pair aspectRecord: aspectRecordsToIngest) { - String aspectName = aspectRecord.getFirst(); - RecordTemplate newValue = aspectRecord.getSecond(); - EntityAspect latest = latestAspects.get(aspectName); - long nextVersion = nextVersions.get(aspectName); - UpdateAspectResult updateResult = ingestAspectToLocalDBNoTransaction(urn, aspectName, ignored -> newValue, auditStamp, systemMetadata, - latest, nextVersion); - result.add(new Pair<>(aspectName, updateResult)); + // Retention optimization and tx + if (_retentionService != null) { + List retentionBatch = upsertResults.stream() + // Only consider retention when there was a previous version + .filter(result -> latestAspects.containsKey(result.getUrn().toString()) + && latestAspects.get(result.getUrn().toString()).containsKey(result.getRequest().getAspectName())) + .filter(result -> { + RecordTemplate oldAspect = result.getOldValue(); + RecordTemplate newAspect = result.getNewValue(); + // Apply retention policies if there was an update to existing aspect value + return oldAspect != newAspect && oldAspect != null && _retentionService != null; + }) + .map(result -> RetentionService.RetentionContext.builder() + .urn(result.getUrn()) + .aspectName(result.getRequest().getAspectName()) + .maxVersion(Optional.of(result.getMaxVersion())) + .build()) + .collect(Collectors.toList()); + _retentionService.applyRetentionWithPolicyDefaults(retentionBatch); + } else { + log.warn("Retention service is missing!"); } - return result; - }, DEFAULT_MAX_TRANSACTION_RETRY); - } - - @Nonnull - protected SystemMetadata generateSystemMetadataIfEmpty(@Nullable SystemMetadata systemMetadata) { - if (systemMetadata == null) { - systemMetadata = new SystemMetadata(); - systemMetadata.setRunId(DEFAULT_RUN_ID); - systemMetadata.setLastObserved(System.currentTimeMillis()); - } - return systemMetadata; - } - - @VisibleForTesting - void validateUrn(@Nonnull final Urn urn) { - EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(_entityRegistry); - validator.setCurrentEntitySpec(_entityRegistry.getEntitySpec(urn.getEntityType())); - RecordTemplateValidator.validate(buildKeyAspect(urn), validationResult -> { - throw new IllegalArgumentException("Invalid urn: " + urn + "\n Cause: " - + validationResult.getMessages()); }, validator); - if (urn.toString().trim().length() != urn.toString().length()) { - throw new IllegalArgumentException("Error: cannot provide an URN with leading or trailing whitespace"); - } - if (URLEncoder.encode(urn.toString()).length() > URN_NUM_BYTES_LIMIT) { - throw new IllegalArgumentException("Error: cannot provide an URN longer than " + Integer.toString(URN_NUM_BYTES_LIMIT) + " bytes (when URL encoded)"); - } - if (urn.toString().contains(DELIMITER_SEPARATOR)) { - throw new IllegalArgumentException("Error: URN cannot contain " + DELIMITER_SEPARATOR + " character"); - } - try { - Urn.createFromString(urn.toString()); - } catch (URISyntaxException e) { - throw new IllegalArgumentException(e); - } + return upsertResults; + }, aspectsBatch, 
DEFAULT_MAX_TRANSACTION_RETRY); } - @Override - public void ingestAspects(@Nonnull final Urn urn, @Nonnull List> aspectRecordsToIngest, - @Nonnull final AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata) { - - systemMetadata = generateSystemMetadataIfEmpty(systemMetadata); - - Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time(); - List> ingestResults = wrappedIngestAspectsToLocalDB(urn, aspectRecordsToIngest, auditStamp, systemMetadata); - ingestToLocalDBTimer.stop(); - - for (Pair result: ingestResults) { - sendEventForUpdateAspectResult(urn, result.getFirst(), result.getSecond()); + @Nonnull + private List emitMCL(List sqlResults, boolean emitMCL) { + List withEmitMCL = sqlResults.stream() + .map(result -> emitMCL ? conditionallyProduceMCLAsync(result) : result) + .collect(Collectors.toList()); + + // join futures messages, capture error state + List> statusPairs = withEmitMCL.stream() + .filter(result -> result.getMclFuture() != null) + .map(result -> { + try { + result.getMclFuture().get(); + return Pair.of(true, result); + } catch (InterruptedException | ExecutionException e) { + return Pair.of(false, result); + } + }).collect(Collectors.toList()); + + if (statusPairs.stream().anyMatch(p -> !p.getFirst())) { + log.error("Failed to produce MCLs: {}", statusPairs.stream() + .filter(p -> !p.getFirst()) + .map(Pair::getValue) + .map(v -> v.getRequest().toString()) + .collect(Collectors.toList())); + // TODO restoreIndices? + throw new RuntimeException("Failed to produce MCLs"); } - } - /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}. - * - * Note that in general, this should not be used externally. It is currently serving upgrade scripts and - * is as such public. 
- * - * @param urn an urn associated with the new aspect - * @param aspectName name of the aspect being inserted - * @param newValue value of the aspect being inserted - * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time - * @param systemMetadata - * @return the {@link RecordTemplate} representation of the written aspect object - */ - @Override - public RecordTemplate ingestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName, - @Nonnull final RecordTemplate newValue, @Nonnull final AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata) { - - log.debug("Invoked ingestAspect with urn: {}, aspectName: {}, newValue: {}", urn, aspectName, newValue); - - systemMetadata = generateSystemMetadataIfEmpty(systemMetadata); - - Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectToLocalDB").time(); - UpdateAspectResult result = wrappedIngestAspectToLocalDB(urn, aspectName, ignored -> newValue, auditStamp, systemMetadata); - ingestToLocalDBTimer.stop(); - - return sendEventForUpdateAspectResult(urn, aspectName, result); + return withEmitMCL; } /** @@ -783,94 +702,39 @@ public RecordTemplate ingestAspect(@Nonnull final Urn urn, @Nonnull final String */ @Nullable @Override - public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, @Nonnull String aspectName, - @Nonnull RecordTemplate newValue, @Nonnull AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata) { + public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, + @Nonnull String aspectName, + @Nonnull RecordTemplate newValue, + @Nonnull AuditStamp auditStamp, + @Nonnull SystemMetadata systemMetadata) { log.debug("Invoked ingestAspectIfNotPresent with urn: {}, aspectName: {}, newValue: {}", urn, aspectName, newValue); - final SystemMetadata internalSystemMetadata = generateSystemMetadataIfEmpty(systemMetadata); - - Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectToLocalDB").time(); - UpdateAspectResult result = _aspectDao.runInTransactionWithRetry(() -> { - final String urnStr = urn.toString(); - final EntityAspect latest = _aspectDao.getLatestAspect(urnStr, aspectName); - if (latest == null) { - long nextVersion = _aspectDao.getNextVersion(urnStr, aspectName); - - return ingestAspectToLocalDBNoTransaction(urn, aspectName, ignored -> newValue, auditStamp, - internalSystemMetadata, latest, nextVersion); - } - RecordTemplate oldValue = EntityUtils.toAspectRecord(urn, aspectName, latest.getMetadata(), getEntityRegistry()); - SystemMetadata oldMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); - return new UpdateAspectResult(urn, oldValue, oldValue, oldMetadata, oldMetadata, MetadataAuditOperation.UPDATE, auditStamp, - latest.getVersion()); - }, DEFAULT_MAX_TRANSACTION_RETRY); - ingestToLocalDBTimer.stop(); + AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() + .one(UpsertBatchItem.builder() + .urn(urn) + .aspectName(aspectName) + .aspect(newValue) + .systemMetadata(systemMetadata) + .build(_entityRegistry)) + .build(); + List ingested = ingestAspects(aspectsBatch, auditStamp, true, false); - return sendEventForUpdateAspectResult(urn, aspectName, result); - } - - protected RecordTemplate sendEventForUpdateAspectResult(@Nonnull final Urn urn, @Nonnull final String aspectName, - @Nonnull UpdateAspectResult result) { - - final RecordTemplate oldValue = result.getOldValue(); - final RecordTemplate updatedValue = result.getNewValue(); - final SystemMetadata oldSystemMetadata 
= result.getOldSystemMetadata(); - final SystemMetadata updatedSystemMetadata = result.getNewSystemMetadata(); - - // Apply retention policies asynchronously if there was an update to existing aspect value - if (oldValue != updatedValue && oldValue != null && _retentionService != null) { - _retentionService.applyRetention(urn, aspectName, - Optional.of(new RetentionService.RetentionContext(Optional.of(result.getMaxVersion())))); - } - - // Produce MCL after a successful update - boolean isNoOp = oldValue == updatedValue; - if (!isNoOp || _alwaysEmitChangeLog || shouldAspectEmitChangeLog(urn, aspectName)) { - log.debug(String.format("Producing MetadataChangeLog for ingested aspect %s, urn %s", aspectName, urn)); - String entityName = urnToEntityName(urn); - EntitySpec entitySpec = getEntityRegistry().getEntitySpec(entityName); - AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); - if (aspectSpec == null) { - throw new RuntimeException(String.format("Unknown aspect %s for entity %s", aspectName, entityName)); - } - - Timer.Context produceMCLTimer = MetricUtils.timer(this.getClass(), "produceMCL").time(); - produceMetadataChangeLog(urn, entityName, aspectName, aspectSpec, oldValue, updatedValue, oldSystemMetadata, - updatedSystemMetadata, result.getAuditStamp(), isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT); - produceMCLTimer.stop(); - - // For legacy reasons, keep producing to the MAE event stream without blocking ingest - try { - Timer.Context produceMAETimer = MetricUtils.timer(this.getClass(), "produceMAE").time(); - produceMetadataAuditEvent(urn, aspectName, oldValue, updatedValue, result.getOldSystemMetadata(), - result.getNewSystemMetadata(), MetadataAuditOperation.UPDATE); - produceMAETimer.stop(); - } catch (Exception e) { - log.warn("Unable to produce legacy MAE, entity may not have legacy Snapshot schema.", e); - } - } else { - log.debug("Skipped producing MetadataAuditEvent for ingested aspect {}, urn {}. Aspect has not changed.", - aspectName, urn); - } - return updatedValue; + return ingested.stream().findFirst().get().getNewValue(); } /** - * Validates that a change type is valid for the given aspect - * @param changeType - * @param aspectSpec - * @return + * Wrapper around batch method for single item + * @param proposal the proposal + * @param auditStamp an audit stamp representing the time and actor proposing the change + * @param async a flag to control whether we commit to primary store or just write to proposal log before returning + * @return an {@link IngestResult} containing the results */ - private boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { - if (aspectSpec.isTimeseries()) { - // Timeseries aspects only support UPSERT - return ChangeType.UPSERT.equals(changeType); - } else { - return (ChangeType.UPSERT.equals(changeType) || ChangeType.PATCH.equals(changeType)); - } + @Override + public IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { + return ingestProposal(AspectsBatchImpl.builder().mcps(List.of(proposal), getEntityRegistry()).build(), auditStamp, + async).stream().findFirst().get(); } - /** * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any additional aspects or do any * enrichment, instead it changes only those which are provided inside the metadata change proposal. 
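// For context: ingestProposal is now batch-first; the single-proposal entry
// point wraps one MCP into an AspectsBatch, mirroring the JavaEntityClient
// change earlier in this patch. A hedged sketch (not part of the patch) of
// a synchronous call; the wiring of entityService, proposal and auditStamp
// is assumed.
import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.Urn;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.entity.IngestResult;
import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
import com.linkedin.metadata.entity.transactions.AspectsBatch;
import com.linkedin.mxe.MetadataChangeProposal;
import java.util.List;
import java.util.Set;

class BatchProposalSketch {
  static Urn ingestOne(EntityService entityService,
      MetadataChangeProposal proposal, AuditStamp auditStamp) {
    AspectsBatch batch = AspectsBatchImpl.builder()
        .mcps(List.of(proposal), entityService.getEntityRegistry())
        .build();
    // Synchronous ingestion returns one IngestResult per proposal in the batch.
    Set<IngestResult> results = entityService.ingestProposal(batch, auditStamp, /* async */ false);
    return results.stream().findFirst().get().getUrn();
  }
}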
-
  /**
   * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any additional aspects or do any
   * enrichment, instead it changes only those which are provided inside the metadata change proposal.
@@ -878,174 +742,127 @@ private boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec)
   * Do not use this method directly for creating new entities, as it DOES NOT create an Entity Key aspect in the DB. Instead,
   * use an Entity Client.
-  * @param mcp the proposal to ingest
+  * @param aspectsBatch the proposals to ingest
   * @param auditStamp an audit stamp representing the time and actor proposing the change
   * @param async a flag to control whether we commit to primary store or just write to proposal log before returning
-  * @return an {@link IngestProposalResult} containing the results
+  * @return an {@link IngestResult} containing the results
   */
  @Override
- public IngestProposalResult ingestProposal(@Nonnull MetadataChangeProposal mcp,
-     AuditStamp auditStamp, final boolean async) {
-
-   log.debug("entity type = {}", mcp.getEntityType());
-   EntitySpec entitySpec = getEntityRegistry().getEntitySpec(mcp.getEntityType());
-   log.debug("entity spec = {}", entitySpec);
-
-   Urn entityUrn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec());
+ public Set<IngestResult> ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async) {

-   AspectSpec aspectSpec = validateAspect(mcp, entitySpec);
+   Stream<IngestResult> timeseriesIngestResults = ingestTimeseriesProposal(aspectsBatch, auditStamp);
+   Stream<IngestResult> nonTimeseriesIngestResults = async ? ingestProposalAsync(aspectsBatch)
+       : ingestProposalSync(aspectsBatch, auditStamp);

-   log.debug("aspect spec = {}", aspectSpec);
-
-   if (!isValidChangeType(mcp.getChangeType(), aspectSpec)) {
-     throw new UnsupportedOperationException(
-         "ChangeType not supported: " + mcp.getChangeType() + " for aspect " + mcp.getAspectName());
-   }
-
-   SystemMetadata systemMetadata = generateSystemMetadataIfEmpty(mcp.getSystemMetadata());
-   systemMetadata.setRegistryName(aspectSpec.getRegistryName());
-   systemMetadata.setRegistryVersion(aspectSpec.getRegistryVersion().toString());
-
-   RecordTemplate oldAspect = null;
-   SystemMetadata oldSystemMetadata = null;
-   RecordTemplate newAspect;
-   SystemMetadata newSystemMetadata;
-
-   if (!aspectSpec.isTimeseries()) {
-     if (!async) {
-       // When async mode is turned off, we write to primary store for non timeseries aspects
-       UpdateAspectResult result;
-       switch (mcp.getChangeType()) {
-         case UPSERT:
-           result = performUpsert(mcp, aspectSpec, systemMetadata, entityUrn, auditStamp);
-           break;
-         case PATCH:
-           result = performPatch(mcp, aspectSpec, systemMetadata, entityUrn, auditStamp);
-           break;
-         default:
-           // Should never reach since we throw error above
-           throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType());
-       }
-       oldAspect = result != null ? result.getOldValue() : null;
-       oldSystemMetadata = result != null ? result.getOldSystemMetadata() : null;
-       newAspect = result != null ? result.getNewValue() : null;
-       newSystemMetadata = result != null ? result.getNewSystemMetadata() : null;
-     } else {
-       // When async is turned on, we write to proposal log and return without waiting
-       _producer.produceMetadataChangeProposal(entityUrn, mcp);
-       return new IngestProposalResult(entityUrn, false, true);
-     }
-   } else {
-     // For timeseries aspects
-     newAspect = convertToRecordTemplate(mcp, aspectSpec);
-     newSystemMetadata = mcp.getSystemMetadata();
-   }
-
-   boolean didUpdate =
-       emitChangeLog(oldAspect, oldSystemMetadata, newAspect, newSystemMetadata, mcp, entityUrn, auditStamp,
-           aspectSpec);
-
-   return new IngestProposalResult(entityUrn, didUpdate, false);
+   return Stream.concat(timeseriesIngestResults, nonTimeseriesIngestResults).collect(Collectors.toSet());
  }

- private AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) {
-   if (!mcp.hasAspectName() || !mcp.hasAspect()) {
-     throw new UnsupportedOperationException("Aspect and aspect name is required for create and update operations");
-   }
-
-   AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName());
-
-   if (aspectSpec == null) {
-     throw new RuntimeException(
-         String.format("Unknown aspect %s for entity %s", mcp.getAspectName(),
-             mcp.getEntityType()));
+ /**
+  * Timeseries aspects are passed straight through to the MCL topic; no MCP is written.
+  * @param aspectsBatch a batch of timeseries upserts
+  * @param auditStamp provided audit information
+  * @return the ingest results; note that these items never appear in the MCP topic
+  */
+ private Stream<IngestResult> ingestTimeseriesProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp) {
+   List<? extends AbstractBatchItem> unsupported = aspectsBatch.getItems().stream()
+       .filter(item -> item.getAspectSpec().isTimeseries() && item.getChangeType() != ChangeType.UPSERT)
+       .collect(Collectors.toList());
+   if (!unsupported.isEmpty()) {
+     throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream()
+         .map(AbstractBatchItem::getChangeType).collect(Collectors.toSet()));
    }
-   return aspectSpec;
- }
-
- private UpdateAspectResult performUpsert(MetadataChangeProposal mcp, AspectSpec aspectSpec, SystemMetadata
-     systemMetadata, Urn entityUrn, AuditStamp auditStamp) {
-   RecordTemplate aspect = convertToRecordTemplate(mcp, aspectSpec);
-   log.debug("aspect = {}", aspect);
-
-   return upsertAspect(aspect, systemMetadata, mcp, entityUrn, auditStamp, aspectSpec);
+   List<Pair<UpsertBatchItem, Optional<Pair<Future<?>, Boolean>>>> timeseriesResults = aspectsBatch.getItems().stream()
+       .filter(item -> item.getAspectSpec().isTimeseries())
+       .map(item -> (UpsertBatchItem) item)
+       .map(item -> Pair.of(item, conditionallyProduceMCLAsync(null, null, item.getAspect(), item.getSystemMetadata(),
+           item.getMetadataChangeProposal(), item.getUrn(), auditStamp, item.getAspectSpec())))
+       .collect(Collectors.toList());
+
+   return timeseriesResults.stream().map(result -> {
+     Optional<Pair<Future<?>, Boolean>> emissionStatus = result.getSecond();
+
+     emissionStatus.ifPresent(status -> {
+       try {
+         status.getFirst().get();
+       } catch (InterruptedException | ExecutionException e) {
+         throw new RuntimeException(e);
+       }
+     });
+
+     UpsertBatchItem request = result.getFirst();
+     return IngestResult.builder()
+         .urn(request.getUrn())
+         .request(request)
+         .publishedMCL(emissionStatus.map(status -> status.getFirst() != null).orElse(false))
+         .processedMCL(emissionStatus.map(Pair::getSecond).orElse(false))
+         .build();
+   });
  }

- private UpdateAspectResult performPatch(MetadataChangeProposal mcp, AspectSpec aspectSpec, SystemMetadata
-     systemMetadata, Urn entityUrn, AuditStamp auditStamp) {
-   if (!supportsPatch(aspectSpec)) {
-     // Prevent unexpected behavior for aspects that do not
currently have 1st class patch support, - // specifically having array based fields that require merging without specifying merge behavior can get into bad states - throw new UnsupportedOperationException("Aspect: " + aspectSpec.getName() + " does not currently support patch " - + "operations."); - } - Patch jsonPatch = convertToJsonPatch(mcp); - log.debug("patch = {}", jsonPatch); - - return patchAspect(jsonPatch, systemMetadata, mcp, entityUrn, auditStamp, aspectSpec); - } + /** + * For async ingestion of non-timeseries, any change type + * @param aspectsBatch non-timeseries ingest aspects + * @return produced items to the MCP topic + */ + private Stream ingestProposalAsync(AspectsBatch aspectsBatch) { + List nonTimeseries = aspectsBatch.getItems().stream() + .filter(item -> !item.getAspectSpec().isTimeseries()) + .collect(Collectors.toList()); - private boolean supportsPatch(AspectSpec aspectSpec) { - // Limit initial support to defined templates - return AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName()); - } + List> futures = nonTimeseries.stream().map(item -> + // When async is turned on, we write to proposal log and return without waiting + _producer.produceMetadataChangeProposal(item.getUrn(), item.getMetadataChangeProposal())) + .filter(Objects::nonNull) + .collect(Collectors.toList()); - private RecordTemplate convertToRecordTemplate(MetadataChangeProposal mcp, AspectSpec aspectSpec) { - RecordTemplate aspect; try { - aspect = GenericRecordUtils.deserializeAspect(mcp.getAspect().getValue(), - mcp.getAspect().getContentType(), aspectSpec); - ValidationUtils.validateOrThrow(aspect); - } catch (ModelConversionException e) { - throw new RuntimeException( - String.format("Could not deserialize %s for aspect %s", mcp.getAspect().getValue(), - mcp.getAspectName())); + return nonTimeseries.stream().map(item -> + IngestResult.builder() + .urn(item.getUrn()) + .request(item) + .publishedMCP(true) + .build()); + } finally { + futures.forEach(f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); } - log.debug("aspect = {}", aspect); - return aspect; } - private Patch convertToJsonPatch(MetadataChangeProposal mcp) { - JsonNode json; - try { - json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); - return JsonPatch.fromJson(json); - } catch (IOException e) { - throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); + private Stream ingestProposalSync(AspectsBatch aspectsBatch, AuditStamp auditStamp) { + AspectsBatchImpl nonTimeseries = AspectsBatchImpl.builder() + .items(aspectsBatch.getItems().stream() + .filter(item -> !item.getAspectSpec().isTimeseries()) + .collect(Collectors.toList())) + .build(); + + List unsupported = nonTimeseries.getItems().stream() + .filter(item -> item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH + && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) + .collect(Collectors.toList()); + if (!unsupported.isEmpty()) { + throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream() + .map(item -> item.getMetadataChangeProposal().getChangeType()).collect(Collectors.toSet())); } - } - private UpdateAspectResult upsertAspect(final RecordTemplate aspect, final SystemMetadata systemMetadata, - MetadataChangeProposal mcp, Urn entityUrn, AuditStamp auditStamp, AspectSpec aspectSpec) { - Timer.Context ingestToLocalDBTimer = 
MetricUtils.timer(this.getClass(), "ingestProposalToLocalDB").time(); - UpdateAspectResult result = - wrappedIngestAspectToLocalDB(entityUrn, mcp.getAspectName(), ignored -> aspect, auditStamp, - systemMetadata); - ingestToLocalDBTimer.stop(); - RecordTemplate oldAspect = result.getOldValue(); - RecordTemplate newAspect = result.getNewValue(); - // Apply retention policies asynchronously if there was an update to existing aspect value - if (oldAspect != newAspect && oldAspect != null && _retentionService != null) { - _retentionService.applyRetention(entityUrn, aspectSpec.getName(), - Optional.of(new RetentionService.RetentionContext(Optional.of(result.getMaxVersion())))); - } - return result; - } + List upsertResults = ingestAspects(nonTimeseries, auditStamp, true, true); - private UpdateAspectResult patchAspect(final Patch patch, final SystemMetadata systemMetadata, - MetadataChangeProposal mcp, Urn entityUrn, AuditStamp auditStamp, AspectSpec aspectSpec) { - Timer.Context patchAspectToLocalDBTimer = MetricUtils.timer(this.getClass(), "patchAspect").time(); - UpdateAspectResult result = patchAspectToLocalDB(entityUrn, aspectSpec, patch, auditStamp, systemMetadata); - patchAspectToLocalDBTimer.stop(); - RecordTemplate oldAspect = result.getOldValue(); - RecordTemplate newAspect = result.getNewValue(); - // Apply retention policies asynchronously if there was an update to existing aspect value - if (oldAspect != newAspect && oldAspect != null && _retentionService != null) { - _retentionService.applyRetention(entityUrn, aspectSpec.getName(), - Optional.of(new RetentionService.RetentionContext(Optional.of(result.getMaxVersion())))); - } - return result; + return upsertResults.stream().map(result -> { + AbstractBatchItem item = result.getRequest(); + + return IngestResult.builder() + .urn(item.getUrn()) + .request(item) + .publishedMCL(result.getMclFuture() != null) + .sqlCommitted(true) + .isUpdate(result.getOldValue() != null) + .build(); + }); } @Override @@ -1070,43 +887,19 @@ public String batchApplyRetention(Integer start, Integer count, Integer attemptW return result.toString(); } - private boolean emitChangeLog(@Nullable RecordTemplate oldAspect, @Nullable SystemMetadata oldSystemMetadata, - RecordTemplate newAspect, SystemMetadata newSystemMetadata, - MetadataChangeProposal mcp, Urn entityUrn, - AuditStamp auditStamp, AspectSpec aspectSpec) { - boolean isNoOp = oldAspect == newAspect; - if (!isNoOp || _alwaysEmitChangeLog || shouldAspectEmitChangeLog(aspectSpec)) { - log.debug("Producing MetadataChangeLog for ingested aspect {}, urn {}", mcp.getAspectName(), entityUrn); - - final MetadataChangeLog metadataChangeLog = constructMCL(mcp, urnToEntityName(entityUrn), entityUrn, - isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT, aspectSpec.getName(), auditStamp, newAspect, newSystemMetadata, - oldAspect, oldSystemMetadata); - - log.debug("Serialized MCL event: {}", metadataChangeLog); - - produceMetadataChangeLog(entityUrn, aspectSpec, metadataChangeLog); - preprocessEvent(metadataChangeLog); - - return true; - } else { - log.debug( - "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. 
Aspect has not changed.", - mcp.getAspectName(), entityUrn); - return false; - } - } - - private void preprocessEvent(MetadataChangeLog metadataChangeLog) { + private boolean preprocessEvent(MetadataChangeLog metadataChangeLog) { if (_preProcessHooks.isUiEnabled()) { if (metadataChangeLog.getSystemMetadata() != null) { if (metadataChangeLog.getSystemMetadata().getProperties() != null) { if (UI_SOURCE.equals(metadataChangeLog.getSystemMetadata().getProperties().get(APP_SOURCE))) { // Pre-process the update indices hook for UI updates to avoid perceived lag from Kafka _updateIndicesService.handleChangeEvent(metadataChangeLog); + return true; } } } } + return false; } @Override @@ -1130,6 +923,8 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No logger.accept(String.format( "Reading rows %s through %s from the aspects table completed.", args.start, args.start + args.batchSize)); + LinkedList> futures = new LinkedList<>(); + for (EbeanAspectV2 aspect : rows != null ? rows.getList() : List.of()) { // 1. Extract an Entity type from the entity Urn result.timeGetRowMs = System.currentTimeMillis() - startTime; @@ -1193,14 +988,21 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No latestSystemMetadata.setProperties(properties); // 5. Produce MAE events for the aspect record - produceMetadataChangeLog(urn, entityName, aspectName, aspectSpec, null, aspectRecord, null, + futures.add(alwaysProduceMCLAsync(urn, entityName, aspectName, aspectSpec, null, aspectRecord, null, latestSystemMetadata, new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - ChangeType.RESTATE); + ChangeType.RESTATE).getFirst()); result.sendMessageMs += System.currentTimeMillis() - startTime; rowsMigrated++; } + futures.stream().filter(Objects::nonNull).forEach(f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); try { TimeUnit.MILLISECONDS.sleep(args.batchDelayMs); } catch (InterruptedException e) { @@ -1211,40 +1013,6 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No return result; } - /** - * Updates a particular version of an aspect & optionally emits a {@link com.linkedin.mxe.MetadataAuditEvent}. - * - * Note that in general, this should not be used externally. It is currently serving upgrade scripts and - * is as such public. 
- * - * @param urn an urn associated with the aspect to update - * @param entityName name of the entity being updated - * @param aspectName name of the aspect being updated - * @param aspectSpec spec of the aspect being updated - * @param newValue new value of the aspect being updated - * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time - * @param version specific version of the aspect being requests - * @param emitMae whether a {@link com.linkedin.mxe.MetadataAuditEvent} should be emitted in correspondence upon - * successful update - * @return the {@link RecordTemplate} representation of the requested aspect object - */ - @Override - public RecordTemplate updateAspect( - @Nonnull final Urn urn, - @Nonnull final String entityName, - @Nonnull final String aspectName, - @Nonnull final AspectSpec aspectSpec, - @Nonnull final RecordTemplate newValue, - @Nonnull final AuditStamp auditStamp, - @Nonnull final long version, - @Nonnull final boolean emitMae) { - log.debug( - "Invoked updateAspect with urn: {}, aspectName: {}, newValue: {}, version: {}, emitMae: {}", urn, - aspectName, newValue, version, emitMae); - return updateAspect(urn, entityName, aspectName, aspectSpec, newValue, auditStamp, version, emitMae, - DEFAULT_MAX_TRANSACTION_RETRY); - } - /** * Lists the entity URNs found in storage. * @@ -1309,60 +1077,59 @@ public Map getEntities(@Nonnull final Set urns, @Nonnull Set, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull final AspectSpec aspectSpec, + @Nonnull final MetadataChangeLog metadataChangeLog) { + Future future = _producer.produceMetadataChangeLog(urn, aspectSpec, metadataChangeLog); + return Pair.of(future, preprocessEvent(metadataChangeLog)); } - protected Snapshot buildKeySnapshot(@Nonnull final Urn urn) { - final RecordTemplate keyAspectValue = buildKeyAspect(urn); - return toSnapshotUnion(toSnapshotRecord(urn, ImmutableList.of(toAspectUnion(urn, keyAspectValue)))); + @Override + public Pair, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName, + @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue, + @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata, + @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, + @Nonnull final ChangeType changeType) { + final MetadataChangeLog metadataChangeLog = constructMCL(null, entityName, urn, changeType, aspectName, auditStamp, + newAspectValue, newSystemMetadata, oldAspectValue, oldSystemMetadata); + return alwaysProduceMCLAsync(urn, aspectSpec, metadataChangeLog); } - @Override - public void produceMetadataAuditEventForKey(@Nonnull final Urn urn, - @Nullable final SystemMetadata newSystemMetadata) { + public Optional, Boolean>> conditionallyProduceMCLAsync(@Nullable RecordTemplate oldAspect, + @Nullable SystemMetadata oldSystemMetadata, + RecordTemplate newAspect, SystemMetadata newSystemMetadata, + @Nullable MetadataChangeProposal mcp, Urn entityUrn, + AuditStamp auditStamp, AspectSpec aspectSpec) { + boolean isNoOp = oldAspect == newAspect; + if (!isNoOp || _alwaysEmitChangeLog || shouldAspectEmitChangeLog(aspectSpec)) { + log.debug("Producing MetadataChangeLog for ingested aspect {}, urn {}", aspectSpec.getName(), entityUrn); - final Snapshot newSnapshot = buildKeySnapshot(urn); + final MetadataChangeLog metadataChangeLog = constructMCL(mcp, urnToEntityName(entityUrn), entityUrn, + isNoOp ? 
ChangeType.RESTATE : ChangeType.UPSERT, aspectSpec.getName(), auditStamp, newAspect, newSystemMetadata,
+         oldAspect, oldSystemMetadata);

-   _producer.produceMetadataAuditEvent(urn, null, newSnapshot, null, newSystemMetadata, MetadataAuditOperation.UPDATE);
+     log.debug("Serialized MCL event: {}", metadataChangeLog);
+     Pair<Future<?>, Boolean> emissionStatus = alwaysProduceMCLAsync(entityUrn, aspectSpec, metadataChangeLog);
+     return emissionStatus.getFirst() != null ? Optional.of(emissionStatus) : Optional.empty();
+   } else {
+     log.debug(
+         "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. Aspect has not changed.",
+         aspectSpec.getName(), entityUrn);
+     return Optional.empty();
+   }
  }

- /**
-  * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a
-  * new & previous aspect.
-  *
-  * @param urn the urn associated with the entity changed
-  * @param aspectSpec AspectSpec of the aspect being updated
-  * @param metadataChangeLog metadata change log to push into MCL kafka topic
-  */
- @Override
- public void produceMetadataChangeLog(@Nonnull final Urn urn, AspectSpec aspectSpec,
-     @Nonnull final MetadataChangeLog metadataChangeLog) {
-   _producer.produceMetadataChangeLog(urn, aspectSpec, metadataChangeLog);
- }
+ private UpdateAspectResult conditionallyProduceMCLAsync(UpdateAspectResult result) {
+   AbstractBatchItem request = result.getRequest();
+   Optional<Pair<Future<?>, Boolean>> emissionStatus = conditionallyProduceMCLAsync(result.getOldValue(), result.getOldSystemMetadata(),
+       result.getNewValue(), result.getNewSystemMetadata(),
+       request.getMetadataChangeProposal(), result.getUrn(), result.getAuditStamp(), request.getAspectSpec());

- @Override
- public void produceMetadataChangeLog(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName,
-     @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue,
-     @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata,
-     @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, @Nonnull final ChangeType changeType) {
-   final MetadataChangeLog metadataChangeLog = constructMCL(null, entityName, urn, changeType, aspectName, auditStamp,
-       newAspectValue, newSystemMetadata, oldAspectValue, oldSystemMetadata);
-   produceMetadataChangeLog(urn, aspectSpec, metadataChangeLog);
+   return emissionStatus.map(status ->
+       result.toBuilder()
+           .mclFuture(status.getFirst())
+           .processedMCL(status.getSecond())
+           .build()
+   ).orElse(result);
  }

  @Override
@@ -1390,7 +1157,7 @@ public void ingestEntity(Entity entity, AuditStamp auditStamp) {
  @Override
  public void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp,
-     @Nonnull SystemMetadata systemMetadata) {
+                          @Nonnull SystemMetadata systemMetadata) {
    log.debug("Invoked ingestEntity with entity {}, audit stamp {} systemMetadata {}", entity, auditStamp, systemMetadata.toString());
    ingestSnapshotUnion(entity.getValue(), auditStamp, systemMetadata);
  }

@@ -1432,63 +1199,93 @@ private boolean isAspectMissing(String entityType, String aspectName, Set<String> aspects) {

  @Override
- public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnull final Urn urn,
-     Set<String> includedAspects) {
+ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn,
+     Map<String, RecordTemplate> includedAspects) {
+   List<Pair<String, RecordTemplate>> returnAspects = new ArrayList<>();

-   Set<String> aspectsToGet = new HashSet<>();
-   String entityType = urnToEntityName(urn);
+   final String keyAspectName = getKeyAspectName(urn);
+   final Map<String, RecordTemplate> latestAspects = new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName)));

-   boolean
shouldCheckBrowsePath = isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects); - if (shouldCheckBrowsePath) { - aspectsToGet.add(BROWSE_PATHS_ASPECT_NAME); - } + // key aspect: does not exist in database && is being written + boolean generateDefaults = !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); - boolean shouldCheckBrowsePathV2 = isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects); - if (shouldCheckBrowsePathV2) { - aspectsToGet.add(BROWSE_PATHS_V2_ASPECT_NAME); - } + // conditionally generate defaults + if (generateDefaults) { + String entityType = urnToEntityName(urn); + Set aspectsToGet = new HashSet<>(); - boolean shouldCheckDataPlatform = isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects); - if (shouldCheckDataPlatform) { - aspectsToGet.add(DATA_PLATFORM_INSTANCE_ASPECT_NAME); - } + boolean shouldCheckBrowsePath = isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); + if (shouldCheckBrowsePath) { + aspectsToGet.add(BROWSE_PATHS_ASPECT_NAME); + } - List> aspects = new ArrayList<>(); - final String keyAspectName = getKeyAspectName(urn); - aspectsToGet.add(keyAspectName); + boolean shouldCheckBrowsePathV2 = isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); + if (shouldCheckBrowsePathV2) { + aspectsToGet.add(BROWSE_PATHS_V2_ASPECT_NAME); + } - Map latestAspects = getLatestAspectsForUrn(urn, aspectsToGet); + boolean shouldCheckDataPlatform = isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); + if (shouldCheckDataPlatform) { + aspectsToGet.add(DATA_PLATFORM_INSTANCE_ASPECT_NAME); + } - RecordTemplate keyAspect = latestAspects.get(keyAspectName); - if (keyAspect == null) { - keyAspect = buildKeyAspect(urn); - aspects.add(Pair.of(keyAspectName, keyAspect)); - } + // fetch additional aspects + latestAspects.putAll(getLatestAspectsForUrn(urn, aspectsToGet)); - if (shouldCheckBrowsePath && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null) { - try { - BrowsePaths generatedBrowsePath = buildDefaultBrowsePath(urn); - aspects.add(Pair.of(BROWSE_PATHS_ASPECT_NAME, generatedBrowsePath)); - } catch (URISyntaxException e) { - log.error("Failed to parse urn: {}", urn); + if (shouldCheckBrowsePath && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { + try { + BrowsePaths generatedBrowsePath = buildDefaultBrowsePath(urn); + returnAspects.add(Pair.of(BROWSE_PATHS_ASPECT_NAME, generatedBrowsePath)); + } catch (URISyntaxException e) { + log.error("Failed to parse urn: {}", urn); + } } - } - if (shouldCheckBrowsePathV2 && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null) { - try { - BrowsePathsV2 generatedBrowsePathV2 = buildDefaultBrowsePathV2(urn, false); - aspects.add(Pair.of(BROWSE_PATHS_V2_ASPECT_NAME, generatedBrowsePathV2)); - } catch (URISyntaxException e) { - log.error("Failed to parse urn: {}", urn); + if (shouldCheckBrowsePathV2 && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { + try { + BrowsePathsV2 generatedBrowsePathV2 = buildDefaultBrowsePathV2(urn, false); + returnAspects.add(Pair.of(BROWSE_PATHS_V2_ASPECT_NAME, generatedBrowsePathV2)); + } catch (URISyntaxException e) { + log.error("Failed to parse urn: {}", urn); + } } - } - if (shouldCheckDataPlatform && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null) { - 
DataPlatformInstanceUtils.buildDataPlatformInstance(entityType, keyAspect) - .ifPresent(aspect -> aspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); + if (shouldCheckDataPlatform && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null + && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { + RecordTemplate keyAspect = includedAspects.get(keyAspectName); + DataPlatformInstanceUtils.buildDataPlatformInstance(entityType, keyAspect) + .ifPresent(aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); + } } - return aspects; + return Pair.of(latestAspects.containsKey(keyAspectName), returnAspects); + } + + @Override + public List> generateDefaultAspectsIfMissing(@Nonnull final Urn urn, + Map includedAspects) { + + final String keyAspectName = getKeyAspectName(urn); + + if (includedAspects.containsKey(keyAspectName)) { + return generateDefaultAspectsOnFirstWrite(urn, includedAspects).getValue(); + } else { + // No key aspect being written, generate it and potentially suggest writing it later + HashMap includedWithKeyAspect = new HashMap<>(includedAspects); + Pair keyAspect = Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); + includedWithKeyAspect.put(keyAspect.getKey(), keyAspect.getValue()); + + Pair>> returnAspects = generateDefaultAspectsOnFirstWrite(urn, includedWithKeyAspect); + + // missing key aspect in database, add it + if (!returnAspects.getFirst()) { + returnAspects.getValue().add(keyAspect); + } + + return returnAspects.getValue(); + } } private void ingestSnapshotUnion(@Nonnull final Snapshot snapshotUnion, @Nonnull final AuditStamp auditStamp, @@ -1500,28 +1297,18 @@ private void ingestSnapshotUnion(@Nonnull final Snapshot snapshotUnion, @Nonnull log.info("INGEST urn {} with system metadata {}", urn.toString(), systemMetadata.toString()); aspectRecordsToIngest.addAll(generateDefaultAspectsIfMissing(urn, - aspectRecordsToIngest.stream().map(pair -> pair.getFirst()).collect(Collectors.toSet()))); + aspectRecordsToIngest.stream().collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); - ingestAspects(urn, aspectRecordsToIngest, auditStamp, systemMetadata); - } + AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() + .items(aspectRecordsToIngest.stream().map(pair -> UpsertBatchItem.builder() + .urn(urn) + .aspectName(pair.getKey()) + .aspect(pair.getValue()) + .systemMetadata(systemMetadata) + .build(_entityRegistry)).collect(Collectors.toList())) + .build(); - @Override - public Snapshot buildSnapshot(@Nonnull final Urn urn, @Nonnull final RecordTemplate aspectValue) { - // if the aspect value is the key, we do not need to include the key a second time - if (PegasusUtils.getAspectNameFromSchema(aspectValue.schema()).equals(getKeyAspectName(urn))) { - return toSnapshotUnion(toSnapshotRecord(urn, ImmutableList.of(toAspectUnion(urn, aspectValue)))); - } - - final RecordTemplate keyAspectValue = buildKeyAspect(urn); - return toSnapshotUnion( - toSnapshotRecord(urn, ImmutableList.of(toAspectUnion(urn, keyAspectValue), toAspectUnion(urn, aspectValue)))); - } - - protected RecordTemplate buildKeyAspect(@Nonnull final Urn urn) { - final EntitySpec spec = _entityRegistry.getEntitySpec(urnToEntityName(urn)); - final AspectSpec keySpec = spec.getKeyAspectSpec(); - final RecordDataSchema keySchema = keySpec.getPegasusSchema(); - return EntityKeyUtils.convertUrnToEntityKey(urn, keySpec); + ingestAspects(aspectsBatch, auditStamp, true, true); } @Override @@ -1641,24 +1428,33 @@ public 
RollbackRunResult rollbackWithConditions(List aspectRow List removedAspects = new ArrayList<>(); AtomicInteger rowsDeletedFromEntityDeletion = new AtomicInteger(0); - aspectRows.forEach(aspectToRemove -> { - + List> futures = aspectRows.stream().map(aspectToRemove -> { RollbackResult result = deleteAspect(aspectToRemove.getUrn(), aspectToRemove.getAspectName(), conditions, hardDelete); if (result != null) { Optional aspectSpec = getAspectSpec(result.entityName, result.aspectName); if (!aspectSpec.isPresent()) { log.error("Issue while rolling back: unknown aspect {} for entity {}", result.entityName, result.aspectName); - return; + return null; } rowsDeletedFromEntityDeletion.addAndGet(result.additionalRowsAffected); removedAspects.add(aspectToRemove); - produceMetadataChangeLog(result.getUrn(), result.getEntityName(), result.getAspectName(), aspectSpec.get(), + return alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), aspectSpec.get(), result.getOldValue(), result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), // TODO: use properly attributed audit stamp. createSystemAuditStamp(), - result.getChangeType()); + result.getChangeType()).getFirst(); + } + + return null; + }).filter(Objects::nonNull).collect(Collectors.toList()); + + futures.forEach(f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); } }); @@ -1697,11 +1493,19 @@ public RollbackRunResult deleteUrn(Urn urn) { rowsDeletedFromEntityDeletion = result.additionalRowsAffected; removedAspects.add(summary); - produceMetadataChangeLog(result.getUrn(), result.getEntityName(), result.getAspectName(), keySpec, + Future future = alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), keySpec, result.getOldValue(), result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), // TODO: Use a proper inferred audit stamp createSystemAuditStamp(), - result.getChangeType()); + result.getChangeType()).getFirst(); + + if (future != null) { + try { + future.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + } } return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion); @@ -1752,7 +1556,7 @@ public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map { + final RollbackResult result = _aspectDao.runInTransactionWithRetry((tx) -> { Integer additionalRowsDeleted = 0; // 1. Fetch the latest existing version of the aspect. 
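Both rollback paths above now keep the Future returned by alwaysProduceMCLAsync and join it only after the deletes have been issued, so a failed MCL emission surfaces as a RuntimeException instead of being silently dropped. A self-contained sketch of that emit-then-join shape follows; `items` and `emit` are illustrative stand-ins, not names from this patch:

import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.function.Function;
import java.util.stream.Collectors;

class EmitThenJoinSketch {
  // Kick off every emission first, then block once at the end. A null future
  // means "nothing was emitted" (e.g. an unknown aspect) and is skipped.
  static <T> void emitAllThenJoin(List<T> items, Function<T, Future<?>> emit) {
    List<Future<?>> futures = items.stream()
        .map(emit)
        .filter(Objects::nonNull)
        .collect(Collectors.toList());

    futures.forEach(f -> {
      try {
        f.get(); // surfaces InterruptedException/ExecutionException from the producer
      } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
      }
    });
  }
}

Firing all sends before the first join keeps the producer pipeline full, while the final loop still guarantees every emission completed before the rollback result is returned.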
@@ -1799,7 +1603,7 @@ public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map _aspectDao.deleteAspect(aspect)); + aspectsToDelete.forEach(aspect -> _aspectDao.deleteAspect(tx, aspect)); if (survivingAspect != null) { // if there was a surviving aspect, copy its information into the latest row @@ -1810,15 +1614,15 @@ public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map getEnvelopedAspects(final S private EnvelopedAspect getKeyEnvelopedAspect(final Urn urn) { final EntitySpec spec = getEntityRegistry().getEntitySpec(PegasusUtils.urnToEntityName(urn)); final AspectSpec keySpec = spec.getKeyAspectSpec(); - final RecordDataSchema keySchema = keySpec.getPegasusSchema(); final com.linkedin.entity.Aspect aspect = new com.linkedin.entity.Aspect(EntityKeyUtils.convertUrnToEntityKey(urn, keySpec).data()); @@ -1992,19 +1795,19 @@ private EnvelopedAspect getKeyEnvelopedAspect(final Urn urn) { } @Nonnull - private UpdateAspectResult ingestAspectToLocalDBNoTransaction( - @Nonnull final Urn urn, - @Nonnull final String aspectName, - @Nonnull final Function, RecordTemplate> updateLambda, - @Nonnull final AuditStamp auditStamp, - @Nonnull final SystemMetadata providedSystemMetadata, - @Nullable final EntityAspect latest, - @Nonnull final Long nextVersion) { + private UpdateAspectResult ingestAspectToLocalDB( + @Nullable Transaction tx, + @Nonnull final Urn urn, + @Nonnull final String aspectName, + @Nonnull final RecordTemplate newValue, + @Nonnull final AuditStamp auditStamp, + @Nonnull final SystemMetadata providedSystemMetadata, + @Nullable final EntityAspect latest, + @Nonnull final Long nextVersion) { // 2. Compare the latest existing and new. final RecordTemplate oldValue = latest == null ? null : EntityUtils.toAspectRecord(urn, aspectName, latest.getMetadata(), getEntityRegistry()); - final RecordTemplate newValue = updateLambda.apply(Optional.ofNullable(oldValue)); // 3. If there is no difference between existing and new, we just update // the lastObserved in system metadata. RunId should stay as the original runId @@ -2014,20 +1817,28 @@ private UpdateAspectResult ingestAspectToLocalDBNoTransaction( latest.setSystemMetadata(RecordUtils.toJsonString(latestSystemMetadata)); - _aspectDao.saveAspect(latest, false); + log.info("Ingesting aspect with name {}, urn {}", aspectName, urn); + _aspectDao.saveAspect(tx, latest, false); // metrics _aspectDao.incrementWriteMetrics(aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); - return new UpdateAspectResult(urn, oldValue, oldValue, - EntityUtils.parseSystemMetadata(latest.getSystemMetadata()), latestSystemMetadata, - MetadataAuditOperation.UPDATE, auditStamp, 0); + return UpdateAspectResult.builder() + .urn(urn) + .oldValue(oldValue) + .newValue(oldValue) + .oldSystemMetadata(EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) + .newSystemMetadata(latestSystemMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(0) + .build(); } // 4. Save the newValue as the latest version log.debug("Ingesting aspect with name {}, urn {}", aspectName, urn); String newValueStr = EntityUtils.toJsonAspect(newValue); - long versionOfOld = _aspectDao.saveLatestAspect(urn.toString(), aspectName, latest == null ? null : EntityUtils.toJsonAspect(oldValue), + long versionOfOld = _aspectDao.saveLatestAspect(tx, urn.toString(), aspectName, latest == null ? null : EntityUtils.toJsonAspect(oldValue), latest == null ? null : latest.getCreatedBy(), latest == null ? 
null : latest.getCreatedFor(), latest == null ? null : latest.getCreatedOn(), latest == null ? null : latest.getSystemMetadata(), newValueStr, auditStamp.getActor().toString(), @@ -2037,75 +1848,16 @@ private UpdateAspectResult ingestAspectToLocalDBNoTransaction( // metrics _aspectDao.incrementWriteMetrics(aspectName, 1, newValueStr.getBytes(StandardCharsets.UTF_8).length); - return new UpdateAspectResult(urn, oldValue, newValue, - latest == null ? null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata()), providedSystemMetadata, - MetadataAuditOperation.UPDATE, auditStamp, versionOfOld); - } - - @Nonnull - private Map getLatestAspectForUrn(@Nonnull final Urn urn, @Nonnull final Set aspectNames) { - Set urns = new HashSet<>(); - urns.add(urn); - - Map result = new HashMap<>(); - getLatestAspect(urns, aspectNames).forEach((key, aspectEntry) -> { - final String aspectName = key.getAspect(); - result.put(aspectName, aspectEntry); - }); - return result; - } - - @Nonnull - private RecordTemplate updateAspect( - @Nonnull final Urn urn, - @Nonnull final String entityName, - @Nonnull final String aspectName, - @Nonnull final AspectSpec aspectSpec, - @Nonnull final RecordTemplate value, - @Nonnull final AuditStamp auditStamp, - @Nonnull final long version, - @Nonnull final boolean emitMae, - final int maxTransactionRetry) { - - final UpdateAspectResult result = _aspectDao.runInTransactionWithRetry(() -> { - - final EntityAspect oldAspect = _aspectDao.getAspect(urn.toString(), aspectName, version); - final RecordTemplate oldValue = - oldAspect == null ? null : EntityUtils.toAspectRecord(urn, aspectName, oldAspect.getMetadata(), getEntityRegistry()); - - SystemMetadata oldSystemMetadata = - oldAspect == null ? new SystemMetadata() : EntityUtils.parseSystemMetadata(oldAspect.getSystemMetadata()); - // create a duplicate of the old system metadata to update and write back - SystemMetadata newSystemMetadata = - oldAspect == null ? new SystemMetadata() : EntityUtils.parseSystemMetadata(oldAspect.getSystemMetadata()); - newSystemMetadata.setLastObserved(System.currentTimeMillis()); - - log.debug("Updating aspect with name {}, urn {}", aspectName, urn); - String aspectStr = EntityUtils.toJsonAspect(value); - _aspectDao.saveAspect(urn.toString(), aspectName, aspectStr, auditStamp.getActor().toString(), - auditStamp.hasImpersonator() ? auditStamp.getImpersonator().toString() : null, - new Timestamp(auditStamp.getTime()), EntityUtils.toJsonAspect(newSystemMetadata), version, oldAspect == null); - - // metrics - _aspectDao.incrementWriteMetrics(aspectName, 1, aspectStr.getBytes(StandardCharsets.UTF_8).length); - - return new UpdateAspectResult(urn, oldValue, value, oldSystemMetadata, newSystemMetadata, - MetadataAuditOperation.UPDATE, auditStamp, version); - }, maxTransactionRetry); - - final RecordTemplate oldValue = result.getOldValue(); - final RecordTemplate newValue = result.getNewValue(); - - if (emitMae) { - log.debug("Producing MetadataAuditEvent for updated aspect {}, urn {}", aspectName, urn); - produceMetadataChangeLog(urn, entityName, aspectName, aspectSpec, oldValue, newValue, - result.getOldSystemMetadata(), result.getNewSystemMetadata(), auditStamp, ChangeType.UPSERT); - } else { - log.debug("Skipped producing MetadataAuditEvent for updated aspect {}, urn {}. emitMAE is false.", - aspectName, urn); - } - - return newValue; + return UpdateAspectResult.builder() + .urn(urn) + .oldValue(oldValue) + .newValue(newValue) + .oldSystemMetadata(latest == null ? 
null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) + .newSystemMetadata(providedSystemMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(versionOfOld) + .build(); } /** @@ -2174,14 +1926,7 @@ private DataPlatformInfo getDataPlatformInfo(Urn urn) { return null; } - private boolean shouldAspectEmitChangeLog(@Nonnull final Urn urn, @Nonnull final String aspectName) { - final String entityName = urnToEntityName(urn); - final EntitySpec entitySpec = getEntityRegistry().getEntitySpec(entityName); - final AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); - return shouldAspectEmitChangeLog(aspectSpec); - } - - private boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspectSpec) { + private static boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspectSpec) { final List relationshipFieldSpecs = aspectSpec.getRelationshipFieldSpecs(); return relationshipFieldSpecs.stream().anyMatch(RelationshipFieldSpec::isLineageRelationship); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java index a989f519910d6..ffd63479589bc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java @@ -8,10 +8,13 @@ import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.validation.EntityRegistryUrnValidator; import com.linkedin.metadata.entity.validation.RecordTemplateValidator; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; @@ -21,9 +24,11 @@ import lombok.extern.slf4j.Slf4j; import java.net.URISyntaxException; +import java.net.URLEncoder; import java.util.List; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; @Slf4j @@ -32,6 +37,9 @@ public class EntityUtils { private EntityUtils() { } + public static final int URN_NUM_BYTES_LIMIT = 512; + public static final String URN_DELIMITER_SEPARATOR = "␟"; + @Nonnull public static String toJsonAspect(@Nonnull final RecordTemplate aspectRecord) { return RecordUtils.toJsonString(aspectRecord); @@ -60,10 +68,8 @@ public static void ingestChangeProposals( @Nonnull Urn actor, @Nonnull Boolean async ) { - // TODO: Replace this with a batch ingest proposals endpoint. 
- for (MetadataChangeProposal change : changes) { - entityService.ingestProposal(change, EntityUtils.getAuditStamp(actor), async); - } + entityService.ingestProposal(AspectsBatchImpl.builder() + .mcps(changes, entityService.getEntityRegistry()).build(), getAuditStamp(actor), async); } /** @@ -167,4 +173,34 @@ public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) return false; } } + + public static RecordTemplate buildKeyAspect(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { + final EntitySpec spec = entityRegistry.getEntitySpec(urnToEntityName(urn)); + final AspectSpec keySpec = spec.getKeyAspectSpec(); + return EntityKeyUtils.convertUrnToEntityKey(urn, keySpec); + } + + public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { + EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); + validator.setCurrentEntitySpec(entityRegistry.getEntitySpec(urn.getEntityType())); + RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), validationResult -> { + throw new IllegalArgumentException("Invalid urn: " + urn + "\n Cause: " + + validationResult.getMessages()); }, validator); + + if (urn.toString().trim().length() != urn.toString().length()) { + throw new IllegalArgumentException("Error: cannot provide an URN with leading or trailing whitespace"); + } + if (URLEncoder.encode(urn.toString()).length() > URN_NUM_BYTES_LIMIT) { + throw new IllegalArgumentException("Error: cannot provide an URN longer than " + Integer.toString(URN_NUM_BYTES_LIMIT) + " bytes (when URL encoded)"); + } + if (urn.toString().contains(URN_DELIMITER_SEPARATOR)) { + throw new IllegalArgumentException("Error: URN cannot contain " + URN_DELIMITER_SEPARATOR + " character"); + } + try { + Urn.createFromString(urn.toString()); + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); + } + } + } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java index dcb58ee324a23..b215dd4a5d1ed 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java @@ -39,12 +39,13 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.function.Supplier; +import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import io.ebean.PagedList; +import io.ebean.Transaction; import lombok.extern.slf4j.Slf4j; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*; @@ -85,6 +86,19 @@ public EntityAspect getLatestAspect(@Nonnull String urn, @Nonnull String aspectN return getAspect(urn, aspectName, ASPECT_LATEST_VERSION); } + @Override + public Map> getLatestAspects(Map> urnAspects) { + return urnAspects.entrySet().stream() + .map(entry -> Map.entry(entry.getKey(), entry.getValue().stream() + .map(aspectName -> { + EntityAspect aspect = getLatestAspect(entry.getKey(), aspectName); + return aspect != null ? 
Map.entry(aspectName, aspect) : null; + }) + .filter(Objects::nonNull) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + @Override public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { validateConnection(); @@ -149,7 +163,7 @@ private Map getMaxVersions(@Nonnull final String urn, @Nonnull fin } @Override - public void saveAspect(@Nonnull EntityAspect aspect, final boolean insert) { + public void saveAspect(@Nullable Transaction tx, @Nonnull EntityAspect aspect, final boolean insert) { validateConnection(); SimpleStatement statement = generateSaveStatement(aspect, insert); _cqlSession.execute(statement); @@ -241,7 +255,7 @@ public ListResult listAspectMetadata( @Override @Nonnull - public T runInTransactionWithRetry(@Nonnull final Supplier block, final int maxTransactionRetry) { + public T runInTransactionWithRetry(@Nonnull final Function block, final int maxTransactionRetry) { validateConnection(); int retryCount = 0; Exception lastException; @@ -249,7 +263,7 @@ public T runInTransactionWithRetry(@Nonnull final Supplier block, final i do { try { // TODO: Try to bend this code to make use of Cassandra batches. This method is called from single-urn operations, so perf should not suffer much - return block.get(); + return block.apply(null); } catch (DriverException exception) { lastException = exception; } @@ -319,7 +333,7 @@ private static AuditStamp toAuditStamp(@Nonnull final EntityAspect aspect) { } @Override - public void deleteAspect(@Nonnull final EntityAspect aspect) { + public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect) { validateConnection(); SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(aspect.getUrn())) @@ -332,7 +346,7 @@ public void deleteAspect(@Nonnull final EntityAspect aspect) { } @Override - public int deleteUrn(@Nonnull final String urn) { + public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { validateConnection(); SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn)) @@ -453,42 +467,42 @@ public Iterable listAllUrns(int start, int pageSize) { } @Override - public long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) { + public Map> getNextVersions(Map> urnAspectMap) { validateConnection(); - Map versions = getNextVersions(urn, ImmutableSet.of(aspectName)); - return versions.get(aspectName); - } + Map> result = new HashMap<>(); - @Override - public Map getNextVersions(@Nonnull final String urn, @Nonnull final Set aspectNames) { - validateConnection(); - Map maxVersions = getMaxVersions(urn, aspectNames); - Map nextVersions = new HashMap<>(); + for (Map.Entry> aspectNames : urnAspectMap.entrySet()) { + Map maxVersions = getMaxVersions(aspectNames.getKey(), aspectNames.getValue()); + Map nextVersions = new HashMap<>(); - for (String aspectName: aspectNames) { - long latestVersion = maxVersions.get(aspectName); - long nextVal = latestVersion < 0 ? ASPECT_LATEST_VERSION : latestVersion + 1L; - nextVersions.put(aspectName, nextVal); + for (String aspectName : aspectNames.getValue()) { + long latestVersion = maxVersions.get(aspectName); + long nextVal = latestVersion < 0 ? 
ASPECT_LATEST_VERSION : latestVersion + 1L; + nextVersions.put(aspectName, nextVal); + } + + result.put(aspectNames.getKey(), nextVersions); } - return nextVersions; + return result; } @Override public long saveLatestAspect( - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion ) { validateConnection(); @@ -574,15 +588,16 @@ public void setWritable(boolean canWrite) { @Override public void saveAspect( - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert) { validateConnection(); final EntityAspect aspect = new EntityAspect( @@ -596,7 +611,7 @@ public void saveAspect( impersonator ); - saveAspect(aspect, insert); + saveAspect(tx, aspect, insert); // metrics incrementWriteMetrics(aspectName, 1, aspectMetadata.getBytes(StandardCharsets.UTF_8).length); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java index 3d8245b324ce5..9ebb6b26fc43d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java @@ -14,8 +14,11 @@ import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.EntityAspect; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; +import com.linkedin.metadata.entity.transactions.AspectsBatch; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.retention.DataHubRetentionConfig; import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; @@ -53,22 +56,33 @@ public EntityService getEntityService() { return _entityService; } + @Override + protected AspectsBatch buildAspectsBatch(List mcps) { + 
return AspectsBatchImpl.builder() + .mcps(mcps, _entityService.getEntityRegistry()) + .build(); + } + @Override @WithSpan - public void applyRetention(@Nonnull Urn urn, @Nonnull String aspectName, Retention retentionPolicy, Optional retentionContext) { - log.debug("Applying retention to urn {}, aspectName {}", urn, aspectName); - // If no policies are set or has indefinite policy set, do not apply any retention - if (retentionPolicy.data().isEmpty()) { - return; - } + protected void applyRetention(List retentionContexts) { - if (retentionPolicy.hasVersion()) { - applyVersionBasedRetention(urn, aspectName, retentionPolicy.getVersion(), retentionContext.flatMap(RetentionService.RetentionContext::getMaxVersion)); - } + List nonEmptyContexts = retentionContexts.stream() + .filter(context -> context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) + .collect(Collectors.toList()); - if (retentionPolicy.hasTime()) { - applyTimeBasedRetention(urn, aspectName, retentionPolicy.getTime()); - } + nonEmptyContexts.forEach(context -> { + if (context.getRetentionPolicy().map(Retention::hasVersion).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyVersionBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getVersion(), context.getMaxVersion()); + } + + if (context.getRetentionPolicy().map(Retention::hasTime).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyTimeBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getTime()); + } + }); } @Override @@ -103,7 +117,12 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as .findFirst() .map(DataHubRetentionConfig::getRetention); retentionPolicy.ifPresent(retention -> - applyRetention(urn, aspectNameFromRecord, retention, Optional.of(new RetentionContext(Optional.of(id.getVersion()))))); + applyRetention(List.of(RetentionContext.builder() + .urn(urn) + .aspectName(aspectNameFromRecord) + .retentionPolicy(retentionPolicy) + .maxVersion(Optional.of(id.getVersion())) + .build()))); i += 1; if (i % _batchSize == 0) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java index f12e2ba521b15..c0aef268e14c9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java @@ -1,7 +1,7 @@ package com.linkedin.metadata.entity.ebean; import com.linkedin.metadata.Constants; -import io.ebean.EbeanServer; +import io.ebean.Database; import io.ebean.SqlQuery; import io.ebean.SqlRow; @@ -16,33 +16,33 @@ private AspectStorageValidationUtil() { } - public static long getV1RowCount(EbeanServer server) { + public static long getV1RowCount(Database server) { return server.find(EbeanAspectV1.class).findCount(); } /** * Get the number of rows created not by the DataHub system actor (urn:li:corpuser:__datahub_system) */ - public static long getV2NonSystemRowCount(EbeanServer server) { + public static long getV2NonSystemRowCount(Database server) { return server.find(EbeanAspectV2.class).where(ne("createdby", Constants.SYSTEM_ACTOR)).findCount(); } - public static boolean checkV2TableExists(EbeanServer server) { + public static boolean checkV2TableExists(Database server) { final String queryStr = "SELECT * FROM 
INFORMATION_SCHEMA.TABLES \n" - + "WHERE TABLE_NAME = 'metadata_aspect_v2'"; + + "WHERE lower(TABLE_NAME) = 'metadata_aspect_v2'"; - final SqlQuery query = server.createSqlQuery(queryStr); + final SqlQuery query = server.sqlQuery(queryStr); final List rows = query.findList(); return rows.size() > 0; } - public static boolean checkV1TableExists(EbeanServer server) { + public static boolean checkV1TableExists(Database server) { final String queryStr = "SELECT * FROM INFORMATION_SCHEMA.TABLES \n" - + "WHERE TABLE_NAME = 'metadata_aspect'"; + + "WHERE lower(TABLE_NAME) = 'metadata_aspect'"; - final SqlQuery query = server.createSqlQuery(queryStr); + final SqlQuery query = server.sqlQuery(queryStr); final List rows = query.findList(); return rows.size() > 0; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java index 9e65c752b531b..30886db264994 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java @@ -11,13 +11,16 @@ import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.ListResult; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import com.linkedin.metadata.entity.transactions.AspectsBatch; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.query.ExtraInfo; import com.linkedin.metadata.query.ExtraInfoArray; import com.linkedin.metadata.query.ListResultMetadata; import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; import io.ebean.DuplicateKeyException; -import io.ebean.EbeanServer; +import io.ebean.Database; import io.ebean.ExpressionList; import io.ebean.Junction; import io.ebean.PagedList; @@ -27,26 +30,24 @@ import io.ebean.Transaction; import io.ebean.TxScope; import io.ebean.annotation.TxIsolation; -import io.ebean.annotation.Platform; -import io.ebean.config.dbplatform.DatabasePlatform; -import io.ebean.plugin.SpiServer; import java.net.URISyntaxException; -import java.sql.SQLException; import java.sql.Timestamp; import java.time.Clock; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Supplier; +import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import javax.persistence.RollbackException; + import javax.persistence.PersistenceException; import javax.persistence.Table; + import lombok.extern.slf4j.Slf4j; import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; @@ -54,7 +55,7 @@ @Slf4j public class EbeanAspectDao implements AspectDao, AspectMigrationsDao { - private final EbeanServer _server; + private final Database _server; private boolean _connectionValidated = false; private final Clock _clock = Clock.systemUTC(); @@ -67,7 +68,7 @@ public class EbeanAspectDao implements AspectDao, AspectMigrationsDao { // more testing. 
private int _queryKeysCount = 375; // 0 means no pagination on keys - public EbeanAspectDao(@Nonnull final EbeanServer server) { + public EbeanAspectDao(@Nonnull final Database server) { _server = server; } @@ -77,10 +78,10 @@ public void setWritable(boolean canWrite) { } /** - * Return the {@link EbeanServer} server instance used for customized queries. + * Return the {@link Database} server instance used for customized queries. * Only used in tests. */ - public EbeanServer getServer() { + public Database getServer() { return _server; } @@ -107,19 +108,20 @@ private boolean validateConnection() { @Override public long saveLatestAspect( - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion ) { validateConnection(); @@ -130,26 +132,27 @@ public long saveLatestAspect( long largestVersion = ASPECT_LATEST_VERSION; if (oldAspectMetadata != null && oldTime != null) { largestVersion = nextVersion; - saveAspect(urn, aspectName, oldAspectMetadata, oldActor, oldImpersonator, oldTime, oldSystemMetadata, largestVersion, true); + saveAspect(tx, urn, aspectName, oldAspectMetadata, oldActor, oldImpersonator, oldTime, oldSystemMetadata, largestVersion, true); } // Save newValue as the latest version (v0) - saveAspect(urn, aspectName, newAspectMetadata, newActor, newImpersonator, newTime, newSystemMetadata, ASPECT_LATEST_VERSION, oldAspectMetadata == null); + saveAspect(tx, urn, aspectName, newAspectMetadata, newActor, newImpersonator, newTime, newSystemMetadata, ASPECT_LATEST_VERSION, oldAspectMetadata == null); return largestVersion; } @Override public void saveAspect( - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert) { validateConnection(); @@ -163,47 +166,38 @@ public void saveAspect( aspect.setCreatedFor(impersonator); } - saveEbeanAspect(aspect, insert); + saveEbeanAspect(tx, aspect, insert); } @Override - public void saveAspect(@Nonnull final EntityAspect aspect, final boolean insert) { + public void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert) { 
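Note: saveLatestAspect and saveAspect now accept a @Nullable Transaction, so one caller-managed transaction can span every write in a batch; passing null keeps the old implicit per-call transaction. A hedged sketch of the calling pattern; the helper below is hypothetical, but the Ebean calls are the ones this patch uses.

    import io.ebean.Database;
    import io.ebean.Transaction;
    import java.util.function.Consumer;

    final class TxPattern {
        /** Runs several DAO writes atomically inside one explicit transaction. */
        static void writeAtomically(Database server, Consumer<Transaction> writes) {
            try (Transaction tx = server.beginTransaction()) {
                tx.setBatchMode(true);   // buffer inserts/updates into a JDBC batch
                writes.accept(tx);       // e.g. repeated dao.saveAspect(tx, ...) calls
                tx.commit();
            }                            // close() rolls back if commit() never ran
        }
    }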
EbeanAspectV2 ebeanAspect = EbeanAspectV2.fromEntityAspect(aspect); - saveEbeanAspect(ebeanAspect, insert); + saveEbeanAspect(tx, ebeanAspect, insert); } - private void saveEbeanAspect(@Nonnull final EbeanAspectV2 ebeanAspect, final boolean insert) { + private void saveEbeanAspect(@Nullable Transaction tx, @Nonnull final EbeanAspectV2 ebeanAspect, final boolean insert) { validateConnection(); if (insert) { - _server.insert(ebeanAspect); + _server.insert(ebeanAspect, tx); } else { - _server.update(ebeanAspect); + _server.update(ebeanAspect, tx); } } @Override - @Nullable - public EntityAspect getLatestAspect(@Nonnull final String urn, @Nonnull final String aspectName) { + public Map> getLatestAspects(@Nonnull Map> urnAspects) { validateConnection(); - final EbeanAspectV2.PrimaryKey key = new EbeanAspectV2.PrimaryKey(urn, aspectName, ASPECT_LATEST_VERSION); - EbeanAspectV2 ebeanAspect = _server.find(EbeanAspectV2.class, key); - return ebeanAspect == null ? null : ebeanAspect.toEntityAspect(); - } - @Override - public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { - validateConnection(); - List result = _server.find(EbeanAspectV2.class) - .where() - .eq("urn", urn) - .eq("aspect", aspectName) - .orderBy() - .desc("version") - .findList(); - if (result.size() == 0) { - return -1; - } - return result.get(0).getKey().getVersion(); + List keys = urnAspects.entrySet().stream() + .flatMap(entry -> entry.getValue().stream() + .map(aspect -> new EbeanAspectV2.PrimaryKey(entry.getKey(), aspect, ASPECT_LATEST_VERSION)) + ).collect(Collectors.toList()); + + List results = _server.find(EbeanAspectV2.class) + .where().idIn(keys) + .findList(); + + return toUrnAspectMap(results); } @Override @@ -240,16 +234,16 @@ public EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key) { } @Override - public void deleteAspect(@Nonnull final EntityAspect aspect) { + public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect) { validateConnection(); EbeanAspectV2 ebeanAspect = EbeanAspectV2.fromEntityAspect(aspect); - _server.delete(ebeanAspect); + _server.delete(ebeanAspect, tx); } @Override - public int deleteUrn(@Nonnull final String urn) { + public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { validateConnection(); - return _server.createQuery(EbeanAspectV2.class).where().eq(EbeanAspectV2.URN_COLUMN, urn).delete(); + return _server.createQuery(EbeanAspectV2.class).where().eq(EbeanAspectV2.URN_COLUMN, urn).delete(tx); } @Override @@ -497,49 +491,37 @@ public ListResult listLatestAspectMetadata( @Override @Nonnull - public T runInTransactionWithRetry(@Nonnull final Supplier block, final int maxTransactionRetry) { + public T runInTransactionWithRetry(@Nonnull final Function block, final int maxTransactionRetry) { + return runInTransactionWithRetry(block, null, maxTransactionRetry); + } + + @Override + @Nonnull + public T runInTransactionWithRetry(@Nonnull final Function block, @Nullable AspectsBatch batch, + final int maxTransactionRetry) { validateConnection(); int retryCount = 0; - Exception lastException; + Exception lastException = null; T result = null; do { try (Transaction transaction = _server.beginTransaction(TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction.setBatchMode(true); - result = block.get(); + result = block.apply(transaction); transaction.commit(); lastException = null; break; - } catch (RollbackException | DuplicateKeyException exception) { - 
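Note: getLatestAspects collapses what used to be one find() per (urn, aspect) pair into a single where().idIn(keys) query. The extraction dropped the generic parameters in the hunk above; judging from the surrounding code, the shapes are Map<String, Set<String>> in and Map<String, Map<String, EntityAspect>> out, and the caller sketch below assumes exactly that.

    import com.linkedin.metadata.entity.EntityAspect;
    import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
    import java.util.Map;
    import java.util.Set;

    final class LatestAspectLookup {
        /** One round-trip for many (urn, aspect) pairs instead of a query per pair. */
        static EntityAspect latestOf(EbeanAspectDao dao, String urn, String aspectName) {
            Map<String, Map<String, EntityAspect>> latest =
                dao.getLatestAspects(Map.of(urn, Set.of(aspectName)));
            return latest.getOrDefault(urn, Map.of()).get(aspectName);  // null when never written
        }
    }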
MetricUtils.counter(MetricRegistry.name(this.getClass(), "txFailed")).inc();
-        lastException = exception;
       } catch (PersistenceException exception) {
-        MetricUtils.counter(MetricRegistry.name(this.getClass(), "txFailed")).inc();
-        // TODO: replace this logic by catching SerializableConflictException above once the exception is available
-        SpiServer pluginApi = _server.getPluginApi();
-        DatabasePlatform databasePlatform = pluginApi.getDatabasePlatform();
-
-        if (databasePlatform.isPlatform(Platform.POSTGRES)) {
-          Throwable cause = exception.getCause();
-          if (cause instanceof SQLException) {
-            SQLException sqlException = (SQLException) cause;
-            String sqlState = sqlException.getSQLState();
-            while (sqlState == null && sqlException.getCause() instanceof SQLException) {
-              sqlException = (SQLException) sqlException.getCause();
-              sqlState = sqlException.getSQLState();
-            }
-
-            // version 11.33.3 of io.ebean does not have a SerializableConflictException (will be available with version 11.44.1),
-            // therefore when using a PostgreSQL database we have to check the SQL state 40001 here to retry the transactions
-            // also in case of serialization errors ("could not serialize access due to concurrent update")
-            if (sqlState.equals("40001")) {
-              lastException = exception;
-              continue;
-            }
+        if (exception instanceof DuplicateKeyException) {
+          if (batch != null && batch.getItems().stream().allMatch(a -> a.getAspectName().equals(a.getEntitySpec().getKeyAspectSpec().getName()))) {
+            log.warn("Skipping DuplicateKeyException retry since aspect is the key aspect. {}", batch.getUrnAspectsMap().keySet());
+            continue;
           }
         }
-        throw exception;
+        MetricUtils.counter(MetricRegistry.name(this.getClass(), "txFailed")).inc();
+        log.warn("Retryable PersistenceException: {}", exception.getMessage());
+        lastException = exception;
       }
     } while (++retryCount <= maxTransactionRetry);
@@ -552,57 +534,64 @@ public <T> T runInTransactionWithRetry(@Nonnull final Supplier<T> block, final i
   }

   @Override
-  public long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) {
+  public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) {
     validateConnection();
     final List<EbeanAspectV2.PrimaryKey> result = _server.find(EbeanAspectV2.class)
-        .where()
-        .eq(EbeanAspectV2.URN_COLUMN, urn.toString())
-        .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName)
-        .orderBy()
-        .desc(EbeanAspectV2.VERSION_COLUMN)
-        .setMaxRows(1)
-        .findIds();
+            .where()
+            .eq(EbeanAspectV2.URN_COLUMN, urn.toString())
+            .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName)
+            .orderBy()
+            .desc(EbeanAspectV2.VERSION_COLUMN)
+            .setMaxRows(1)
+            .findIds();

-    return result.isEmpty() ? 0 : result.get(0).getVersion() + 1L;
+    return result.isEmpty() ?
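Note: the rewritten catch block treats any PersistenceException as retryable up to maxTransactionRetry, instead of special-casing Postgres SQL state 40001, and it skips the pointless retry when a duplicate key hits the entity's own key aspect (the row already exists, so retrying can never succeed). The control flow distilled, with the DataHub specifics stubbed out; names here are illustrative.

    import java.util.function.Function;
    import javax.persistence.PersistenceException;

    final class RetrySketch {
        /** Runs block, retrying on PersistenceException; rethrows the last failure. */
        static <T> T withRetry(Function<Integer, T> block, int maxRetry) {
            PersistenceException last = null;
            for (int attempt = 0; attempt <= maxRetry; attempt++) {
                try {
                    return block.apply(attempt);
                } catch (PersistenceException e) {
                    last = e;  // assumed transient: lock timeout, serialization failure, ...
                }
            }
            throw last;
        }
    }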
-1 : result.get(0).getVersion(); } - @Override - public Map getNextVersions(@Nonnull final String urn, @Nonnull final Set aspectNames) { + public Map> getNextVersions(@Nonnull Map> urnAspects) { validateConnection(); - Map result = new HashMap<>(); + Junction queryJunction = _server.find(EbeanAspectV2.class) - .select("aspect, max(version)") - .where() - .eq("urn", urn) - .or(); + .select("urn, aspect, max(version)") + .where() + .in("urn", urnAspects.keySet()) + .or(); ExpressionList exp = null; - for (String aspectName: aspectNames) { + for (Map.Entry> entry: urnAspects.entrySet()) { if (exp == null) { - exp = queryJunction.eq("aspect", aspectName); + exp = queryJunction.and() + .eq("urn", entry.getKey()) + .in("aspect", entry.getValue()) + .endAnd(); } else { - exp = exp.eq("aspect", aspectName); + exp = exp.and() + .eq("urn", entry.getKey()) + .in("aspect", entry.getValue()) + .endAnd(); } } + + Map> result = new HashMap<>(); + // Default next version 0 + urnAspects.forEach((key, value) -> { + Map defaultNextVersion = new HashMap<>(); + value.forEach(aspectName -> defaultNextVersion.put(aspectName, 0L)); + result.put(key, defaultNextVersion); + }); + if (exp == null) { return result; } - // Order by ascending version so that the results are correctly populated. - // TODO: Improve the below logic to be more explicit. - exp.orderBy().asc(EbeanAspectV2.VERSION_COLUMN); + List dbResults = exp.endOr().findIds(); for (EbeanAspectV2.PrimaryKey key: dbResults) { - result.put(key.getAspect(), key.getVersion()); - } - - for (String aspectName: aspectNames) { - long nextVal = ASPECT_LATEST_VERSION; - if (result.containsKey(aspectName)) { - nextVal = result.get(aspectName) + 1L; + if (result.get(key.getUrn()).get(key.getAspect()) <= key.getVersion()) { + result.get(key.getUrn()).put(key.getAspect(), key.getVersion() + 1L); } - result.put(aspectName, nextVal); } + return result; } @@ -676,4 +665,21 @@ public List getAspectsInRange(@Nonnull Urn urn, Set aspect .findList(); return ebeanAspects.stream().map(EbeanAspectV2::toEntityAspect).collect(Collectors.toList()); } + + private static Map toAspectMap(Set beans) { + return beans.stream().map(bean -> Map.entry(bean.getAspect(), bean)) + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toEntityAspect())); + } + + private static Map> toUrnAspectMap(Collection beans) { + return beans.stream() + .collect(Collectors.groupingBy(EbeanAspectV2::getUrn, Collectors.toSet())) + .entrySet().stream() + .map(e -> Map.entry(e.getKey(), toAspectMap(e.getValue()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private static String buildMetricName(EntitySpec entitySpec, AspectSpec aspectSpec, String status) { + return String.join(MetricUtils.DELIMITER, List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java index 1c0729b7c27e4..d94ec1fa7ae2b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java @@ -4,25 +4,28 @@ import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import 
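Note: the batched getNextVersions seeds every requested (urn, aspect) pair with version 0, then bumps pairs that already have rows to max(version) + 1. The arithmetic in isolation, assuming the per-urn request is a Set of aspect names as in the hunk above; the helper is illustrative.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Set;

    final class NextVersionMath {
        /** maxVersions holds the highest stored version per aspect; absent means never written. */
        static Map<String, Long> nextVersions(Set<String> requested, Map<String, Long> maxVersions) {
            Map<String, Long> next = new HashMap<>();
            for (String aspect : requested) {
                Long max = maxVersions.get(aspect);
                next.put(aspect, max == null ? 0L : max + 1L);
            }
            return next;
        }
    }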
com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; +import com.linkedin.metadata.entity.transactions.AspectsBatch; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.retention.DataHubRetentionConfig; import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; import com.linkedin.retention.VersionBasedRetention; import com.linkedin.metadata.Constants; -import io.ebean.EbeanServer; +import io.ebean.Database; import io.ebean.Expression; import io.ebean.ExpressionList; import io.ebean.PagedList; import io.ebean.Query; import io.ebean.Transaction; +import io.ebean.TxScope; import io.ebeaninternal.server.expression.Op; import io.ebeaninternal.server.expression.SimpleExpression; import io.opentelemetry.extension.annotations.WithSpan; import java.sql.Timestamp; import java.time.Clock; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -38,7 +41,7 @@ @RequiredArgsConstructor public class EbeanRetentionService extends RetentionService { private final EntityService _entityService; - private final EbeanServer _server; + private final Database _server; private final int _batchSize; private final Clock _clock = Clock.systemUTC(); @@ -48,35 +51,59 @@ public EntityService getEntityService() { return _entityService; } + @Override + protected AspectsBatch buildAspectsBatch(List mcps) { + return AspectsBatchImpl.builder() + .mcps(mcps, _entityService.getEntityRegistry()) + .build(); + } + @Override @WithSpan - public void applyRetention(@Nonnull Urn urn, @Nonnull String aspectName, Retention retentionPolicy, - Optional retentionContext) { - log.debug("Applying retention to urn {}, aspectName {}", urn, aspectName); - // If no policies are set or has indefinite policy set, do not apply any retention - if (retentionPolicy.data().isEmpty()) { - return; - } - ExpressionList deleteQuery = _server.find(EbeanAspectV2.class) - .where() - .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .ne(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) - .or(); - - List filterList = new ArrayList<>(); - if (retentionPolicy.hasVersion()) { - getVersionBasedRetentionQuery(urn, aspectName, retentionPolicy.getVersion(), - retentionContext.flatMap(RetentionService.RetentionContext::getMaxVersion)).ifPresent(filterList::add); - } - if (retentionPolicy.hasTime()) { - filterList.add(getTimeBasedRetentionQuery(retentionPolicy.getTime())); - } + protected void applyRetention(List retentionContexts) { + + List nonEmptyContexts = retentionContexts.stream() + .filter(context -> context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()).collect(Collectors.toList()); // Only run delete if at least one of the retention policies are applicable - if (!filterList.isEmpty()) { - filterList.forEach(deleteQuery::add); - deleteQuery.endOr().delete(); + if (!nonEmptyContexts.isEmpty()) { + ExpressionList deleteQuery = _server.find(EbeanAspectV2.class) + .where() + .ne(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) + .or(); + + boolean applied = false; + for (RetentionContext context : nonEmptyContexts) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + + if (retentionPolicy.hasVersion()) { + boolean appliedVersion = getVersionBasedRetentionQuery(context.getUrn(), context.getAspectName(), + retentionPolicy.getVersion(), 
context.getMaxVersion()) + .map(expr -> + deleteQuery.and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(expr) + .endAnd() + ).isPresent(); + + applied = appliedVersion || applied; + } + + if (retentionPolicy.hasTime()) { + deleteQuery.and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(getTimeBasedRetentionQuery(retentionPolicy.getTime())) + .endAnd(); + applied = true; + } + } + + if (applied) { + deleteQuery.endOr().delete(); + } } } @@ -118,36 +145,46 @@ private void applyRetention( Map retentionPolicyMap, BulkApplyRetentionResult applyRetentionResult ) { - try (Transaction transaction = _server.beginTransaction()) { + try (Transaction transaction = _server.beginTransaction(TxScope.required())) { transaction.setBatchMode(true); transaction.setBatchSize(_batchSize); - for (EbeanAspectV2 row : rows.getList()) { - // Only run for cases where there's multiple versions of the aspect - if (row.getVersion() == 0) { - continue; - } - // 1. Extract an Entity type from the entity Urn - Urn urn; - try { - urn = Urn.createFromString(row.getUrn()); - } catch (Exception e) { - log.error("Failed to serialize urn {}", row.getUrn(), e); - continue; - } - final String aspectNameFromRecord = row.getAspect(); - log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); - // Get the retention policies to apply from the local retention policy map - Optional retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() - .map(key -> retentionPolicyMap.get(key.toString())) - .filter(Objects::nonNull) - .findFirst() - .map(DataHubRetentionConfig::getRetention); - retentionPolicy.ifPresent(retention -> applyRetention(urn, aspectNameFromRecord, retention, - Optional.of(new RetentionContext(Optional.of(row.getVersion()))))); - if (applyRetentionResult != null) { - applyRetentionResult.rowsHandled += 1; - } + + List retentionContexts = rows.getList().stream() + .filter(row -> row.getVersion() != 0) + .map(row -> { + // 1. 
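Note: both retention implementations now take a list of RetentionContext values, so a single delete statement can cover many (urn, aspect) pairs; contexts with an empty policy are filtered out up front and delete nothing. A sketch of assembling that input, using only the builder fields shown in this patch (the urn, aspect name, and max version are sample values):

    import com.linkedin.common.urn.UrnUtils;
    import com.linkedin.metadata.entity.RetentionService;
    import java.util.List;
    import java.util.Optional;

    final class RetentionContextExample {
        static List<RetentionService.RetentionContext> sample() {
            return List.of(RetentionService.RetentionContext.builder()
                .urn(UrnUtils.getUrn("urn:li:corpuser:tester"))
                .aspectName("corpUserInfo")
                .retentionPolicy(Optional.empty())  // empty policy: filtered out, nothing deleted
                .maxVersion(Optional.of(3L))
                .build());
        }
    }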
Extract an Entity type from the entity Urn + Urn urn; + try { + urn = Urn.createFromString(row.getUrn()); + } catch (Exception e) { + log.error("Failed to serialize urn {}", row.getUrn(), e); + return null; + } + + final String aspectNameFromRecord = row.getAspect(); + log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); + // Get the retention policies to apply from the local retention policy map + Optional retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() + .map(key -> retentionPolicyMap.get(key.toString())) + .filter(Objects::nonNull) + .findFirst() + .map(DataHubRetentionConfig::getRetention); + + return RetentionService.RetentionContext.builder() + .urn(urn) + .aspectName(aspectNameFromRecord) + .retentionPolicy(retentionPolicy) + .maxVersion(Optional.of(row.getVersion())) + .build(); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + + applyRetention(retentionContexts); + if (applyRetentionResult != null) { + applyRetentionResult.rowsHandled += retentionContexts.size(); } + transaction.commit(); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java new file mode 100644 index 0000000000000..ca5e070bc5ca7 --- /dev/null +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java @@ -0,0 +1,67 @@ +package com.linkedin.metadata.entity.ebean.transactions; + +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.transactions.AbstractBatchItem; +import com.linkedin.metadata.entity.transactions.AspectsBatch; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.mxe.MetadataChangeProposal; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + + +@Slf4j +@Getter +@Builder(toBuilder = true) +public class AspectsBatchImpl implements AspectsBatch { + private final List items; + + public static class AspectsBatchImplBuilder { + /** + * Just one aspect record template + * @param data aspect data + * @return builder + */ + public AspectsBatchImplBuilder one(AbstractBatchItem data) { + this.items = List.of(data); + return this; + } + + public AspectsBatchImplBuilder mcps(List mcps, EntityRegistry entityRegistry) { + this.items = mcps.stream().map(mcp -> { + if (mcp.getChangeType().equals(ChangeType.PATCH)) { + return PatchBatchItem.PatchBatchItemBuilder.build(mcp, entityRegistry); + } else { + return UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, entityRegistry); + } + }).collect(Collectors.toList()); + return this; + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + AspectsBatchImpl that = (AspectsBatchImpl) o; + return Objects.equals(items, that.items); + } + + @Override + public int hashCode() { + return Objects.hash(items); + } + + @Override + public String toString() { + return "AspectsBatchImpl{" + "items=" + items + '}'; + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java new file mode 100644 index 0000000000000..cc0b3d915b407 --- /dev/null +++ 
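Note: AspectsBatchImpl is the unit of work that ingestion and retention now share. Its builder has two entry points: one(...) wraps a single pre-built item, and mcps(...) converts raw proposals, routing PATCH change types to PatchBatchItem and everything else to UpsertBatchItem. A usage sketch; the registry wiring is assumed.

    import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
    import com.linkedin.metadata.entity.transactions.AspectsBatch;
    import com.linkedin.metadata.models.registry.EntityRegistry;
    import com.linkedin.mxe.MetadataChangeProposal;
    import java.util.List;

    final class BatchExample {
        /** PATCH proposals become PatchBatchItems, all others UpsertBatchItems. */
        static AspectsBatch fromProposals(List<MetadataChangeProposal> mcps, EntityRegistry registry) {
            return AspectsBatchImpl.builder()
                .mcps(mcps, registry)
                .build();
        }
    }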
b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java @@ -0,0 +1,188 @@ +package com.linkedin.metadata.entity.ebean.transactions; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.fge.jsonpatch.JsonPatch; +import com.github.fge.jsonpatch.JsonPatchException; +import com.github.fge.jsonpatch.Patch; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.metadata.entity.transactions.AbstractBatchItem; +import com.linkedin.metadata.entity.validation.ValidationUtils; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.mxe.SystemMetadata; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Objects; + +import static com.linkedin.metadata.Constants.*; + +@Slf4j +@Getter +@Builder(toBuilder = true) +public class PatchBatchItem extends AbstractBatchItem { + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { + int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final Patch patch; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.PATCH; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public UpsertBatchItem applyPatch(EntityRegistry entityRegistry, RecordTemplate recordTemplate) { + UpsertBatchItem.UpsertBatchItemBuilder builder = UpsertBatchItem.builder() + .urn(getUrn()) + .aspectName(getAspectName()) + .metadataChangeProposal(getMetadataChangeProposal()) + .systemMetadata(getSystemMetadata()); + + AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + + RecordTemplate currentValue = recordTemplate != null ? recordTemplate + : aspectTemplateEngine.getDefaultTemplate(getAspectName()); + + if (currentValue == null) { + // Attempting to patch a value to an aspect which has no default value and no existing value. + throw new UnsupportedOperationException(String.format("Patch not supported for aspect with name %s. 
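Note: both batch item classes raise Jackson's streaming string-length ceiling (jackson-core 2.15 caps string tokens at 20 MB by default) so oversized serialized aspects still parse; the limit comes from the INGESTION_MAX_SERIALIZED_STRING_LENGTH environment variable, falling back to MAX_JACKSON_STRING_SIZE. The same configuration in isolation:

    import com.fasterxml.jackson.core.StreamReadConstraints;
    import com.fasterxml.jackson.databind.ObjectMapper;

    final class MapperFactory {
        static ObjectMapper withMaxStringLength(int maxStringLength) {
            ObjectMapper mapper = new ObjectMapper();
            // Raise the per-token string cap so large aspect payloads deserialize.
            mapper.getFactory().setStreamReadConstraints(
                StreamReadConstraints.builder().maxStringLength(maxStringLength).build());
            return mapper;
        }
    }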
" + + "Default aspect is required because no aspect currently exists for urn %s.", getAspectName(), getUrn())); + } + + try { + builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); + } catch (JsonProcessingException | JsonPatchException e) { + throw new RuntimeException(e); + } + + return builder.build(entityRegistry); + } + + public static class PatchBatchItemBuilder { + + public PatchBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); + + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); + + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); + + if (this.patch == null) { + throw new IllegalArgumentException(String.format("Missing patch to apply. Aspect: %s", + this.aspectSpec.getName())); + } + + return new PatchBatchItem(this.urn, this.aspectName, generateSystemMetadataIfEmpty(this.systemMetadata), + this.patch, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); + } + + public static PatchBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.PATCH, aspectSpec)) { + throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() + + " for aspect " + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + PatchBatchItemBuilder builder = PatchBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .patch(convertToJsonPatch(mcp)); + + return builder.build(entityRegistry); + } + + private PatchBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; + } + + private PatchBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; + } + + private static Patch convertToJsonPatch(MetadataChangeProposal mcp) { + JsonNode json; + try { + json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); + return JsonPatch.fromJson(json); + } catch (IOException e) { + throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); + } + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PatchBatchItem that = (PatchBatchItem) o; + return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && patch.equals(that.patch); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, patch); + } + + @Override + public String toString() { + return "PatchBatchItem{" + + "urn=" + urn + + ", aspectName='" + aspectName + + '\'' + + ", systemMetadata=" + systemMetadata + + ", patch=" + patch + + '}'; + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java 
b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java new file mode 100644 index 0000000000000..bd58d267a8308 --- /dev/null +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java @@ -0,0 +1,173 @@ +package com.linkedin.metadata.entity.ebean.transactions; + +import com.datahub.util.exception.ModelConversionException; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.EntityAspect; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.metadata.entity.transactions.AbstractBatchItem; +import com.linkedin.metadata.entity.validation.ValidationUtils; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.mxe.SystemMetadata; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +import java.sql.Timestamp; +import java.util.Objects; + +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + + +@Slf4j +@Getter +@Builder(toBuilder = true) +public class UpsertBatchItem extends AbstractBatchItem { + + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final RecordTemplate aspect; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.UPSERT; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public EntityAspect toLatestEntityAspect(AuditStamp auditStamp) { + EntityAspect latest = new EntityAspect(); + latest.setAspect(getAspectName()); + latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); + latest.setUrn(getUrn().toString()); + latest.setVersion(ASPECT_LATEST_VERSION); + latest.setCreatedOn(new Timestamp(auditStamp.getTime())); + latest.setCreatedBy(auditStamp.getActor().toString()); + return latest; + } + + public static class UpsertBatchItemBuilder { + + public UpsertBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); + + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); + + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); + + ValidationUtils.validateRecordTemplate(entityRegistry, this.entitySpec, this.urn, this.aspect); + + return new UpsertBatchItem(this.urn, this.aspectName, AbstractBatchItem.generateSystemMetadataIfEmpty(this.systemMetadata), + this.aspect, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); + } + + public static UpsertBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { + throw new IllegalArgumentException("Invalid MCP, this class 
only supports change type of UPSERT."); + } + + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { + throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() + + " for aspect " + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + UpsertBatchItemBuilder builder = UpsertBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .aspect(convertToRecordTemplate(mcp, aspectSpec)); + + return builder.build(entityRegistry); + } + + private UpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; + } + + private UpsertBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; + } + + private static RecordTemplate convertToRecordTemplate(MetadataChangeProposal mcp, AspectSpec aspectSpec) { + RecordTemplate aspect; + try { + aspect = GenericRecordUtils.deserializeAspect(mcp.getAspect().getValue(), + mcp.getAspect().getContentType(), aspectSpec); + ValidationUtils.validateOrThrow(aspect); + } catch (ModelConversionException e) { + throw new RuntimeException( + String.format("Could not deserialize %s for aspect %s", mcp.getAspect().getValue(), + mcp.getAspectName())); + } + log.debug("aspect = {}", aspect); + return aspect; + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UpsertBatchItem that = (UpsertBatchItem) o; + return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && aspect.equals(that.aspect); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, aspect); + } + + @Override + public String toString() { + return "UpsertBatchItem{" + + "urn=" + + urn + + ", aspectName='" + + aspectName + + '\'' + + ", systemMetadata=" + + systemMetadata + + ", aspect=" + + aspect + + '}'; + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java index 99bb323e51ecb..6182b27333cbb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java @@ -1,8 +1,16 @@ package com.linkedin.metadata.entity.validation; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.schema.validation.ValidationResult; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; import lombok.extern.slf4j.Slf4j; +import java.util.function.Consumer; + @Slf4j public class ValidationUtils { @@ -33,6 +41,36 @@ public static void validateOrWarn(RecordTemplate record) { }); } + public static AspectSpec validate(EntitySpec entitySpec, String aspectName) { + if (aspectName == null || aspectName.isEmpty()) { + throw new 
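Note: UpsertBatchItem can also be built directly from a RecordTemplate, which is how the test utilities later in this patch use it; build(registry) validates the urn, resolves the entity and aspect specs, and fills in default system metadata when none is given. A sketch of that path (the urn and email are sample values):

    import com.linkedin.common.urn.UrnUtils;
    import com.linkedin.identity.CorpUserInfo;
    import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
    import com.linkedin.metadata.models.registry.EntityRegistry;

    final class UpsertExample {
        static UpsertBatchItem corpUserInfo(EntityRegistry registry, String email) {
            CorpUserInfo aspect = new CorpUserInfo().setActive(true).setEmail(email);
            return UpsertBatchItem.builder()
                .urn(UrnUtils.getUrn("urn:li:corpuser:tester"))
                .aspectName("corpUserInfo")
                .aspect(aspect)
                .build(registry);  // systemMetadata left unset: a default is generated
        }
    }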
UnsupportedOperationException("Aspect name is required for create and update operations"); + } + + AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); + + if (aspectSpec == null) { + throw new RuntimeException( + String.format("Unknown aspect %s for entity %s", aspectName, entitySpec.getName())); + } + + return aspectSpec; + } + + public static void validateRecordTemplate(EntityRegistry entityRegistry, EntitySpec entitySpec, Urn urn, RecordTemplate aspect) { + EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); + validator.setCurrentEntitySpec(entitySpec); + Consumer resultFunction = validationResult -> { + throw new IllegalArgumentException("Invalid format for aspect: " + entitySpec.getName() + "\n Cause: " + + validationResult.getMessages()); }; + RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); + RecordTemplateValidator.validate(aspect, resultFunction, validator); + } + + public static void validateRecordTemplate(EntityRegistry entityRegistry, Urn urn, RecordTemplate aspect) { + EntitySpec entitySpec = entityRegistry.getEntitySpec(urn.getEntityType()); + validateRecordTemplate(entityRegistry, entitySpec, urn, aspect); + } + private ValidationUtils() { } } \ No newline at end of file diff --git a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java index c83764284c0c4..ffadc07124727 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java @@ -1,16 +1,12 @@ package com.linkedin.metadata.event; import com.linkedin.common.urn.Urn; -import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.snapshot.Snapshot; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; import com.linkedin.mxe.MetadataChangeLog; -import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; -import com.linkedin.mxe.SystemMetadata; import io.opentelemetry.extension.annotations.WithSpan; import java.util.concurrent.Future; import javax.annotation.Nonnull; @@ -22,27 +18,6 @@ */ public interface EventProducer { - /** - * Deprecated! Replaced by {@link #produceMetadataChangeLog(Urn, AspectSpec, MetadataChangeLog)} - * - * Produces a {@link com.linkedin.mxe.MetadataAuditEvent} from a - * new & previous Entity {@link Snapshot}. - * @param urn the urn associated with the entity changed - * @param oldSnapshot a {@link RecordTemplate} corresponding to the old snapshot. - * @param newSnapshot a {@link RecordTemplate} corresponding to the new snapshot. - * @param oldSystemMetadata - * @param newSystemMetadata - */ - @Deprecated - void produceMetadataAuditEvent( - @Nonnull final Urn urn, - @Nullable final Snapshot oldSnapshot, - @Nonnull final Snapshot newSnapshot, - @Nullable SystemMetadata oldSystemMetadata, - @Nullable SystemMetadata newSystemMetadata, - MetadataAuditOperation operation - ); - /** * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a * new & previous aspect. 
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java index 2361bcc22780a..e95378a616d97 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java @@ -5,8 +5,12 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import com.linkedin.metadata.key.CorpUserKey; import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; import java.util.Map; import javax.annotation.Nonnull; @@ -25,12 +29,19 @@ public static Map ingestCorpUserKeyAspects(EntityService entit public static Map ingestCorpUserKeyAspects(EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserKey()); Map aspects = new HashMap<>(); + List items = new LinkedList<>(); for (int i = startIndex; i < startIndex + aspectCount; i++) { Urn urn = UrnUtils.getUrn(String.format("urn:li:corpuser:tester%d", i)); CorpUserKey aspect = AspectGenerationUtils.createCorpUserKey(urn); aspects.put(urn, aspect); - entityService.ingestAspect(urn, aspectName, aspect, AspectGenerationUtils.createAuditStamp(), AspectGenerationUtils.createSystemMetadata()); + items.add(UpsertBatchItem.builder() + .urn(urn) + .aspectName(aspectName) + .aspect(aspect) + .systemMetadata(AspectGenerationUtils.createSystemMetadata()) + .build(entityService.getEntityRegistry())); } + entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); return aspects; } @@ -43,13 +54,20 @@ public static Map ingestCorpUserInfoAspects(@Nonnull final En public static Map ingestCorpUserInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); Map aspects = new HashMap<>(); + List items = new LinkedList<>(); for (int i = startIndex; i < startIndex + aspectCount; i++) { Urn urn = UrnUtils.getUrn(String.format("urn:li:corpuser:tester%d", i)); String email = String.format("email%d@test.com", i); CorpUserInfo aspect = AspectGenerationUtils.createCorpUserInfo(email); aspects.put(urn, aspect); - entityService.ingestAspect(urn, aspectName, aspect, AspectGenerationUtils.createAuditStamp(), AspectGenerationUtils.createSystemMetadata()); + items.add(UpsertBatchItem.builder() + .urn(urn) + .aspectName(aspectName) + .aspect(aspect) + .systemMetadata(AspectGenerationUtils.createSystemMetadata()) + .build(entityService.getEntityRegistry())); } + entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); return aspects; } @@ -62,14 +80,21 @@ public static Map ingestChartInfoAspects(@Nonnull final EntitySe public static Map ingestChartInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new ChartInfo()); Map aspects = new HashMap<>(); + List items = new LinkedList<>(); for (int i = startIndex; i < startIndex + aspectCount; i++) { Urn urn = UrnUtils.getUrn(String.format("urn:li:chart:(looker,test%d)", i)); String title = 
String.format("Test Title %d", i); String description = String.format("Test description %d", i); ChartInfo aspect = AspectGenerationUtils.createChartInfo(title, description); aspects.put(urn, aspect); - entityService.ingestAspect(urn, aspectName, aspect, AspectGenerationUtils.createAuditStamp(), AspectGenerationUtils.createSystemMetadata()); + items.add(UpsertBatchItem.builder() + .urn(urn) + .aspectName(aspectName) + .aspect(aspect) + .systemMetadata(AspectGenerationUtils.createSystemMetadata()) + .build(entityService.getEntityRegistry())); } + entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); return aspects; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java index 36ebec5a42849..46d08bc8887b9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java @@ -18,7 +18,7 @@ import com.linkedin.metadata.snapshot.Snapshot; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; -import io.ebean.EbeanServer; +import io.ebean.Database; import java.util.List; import org.testng.Assert; import org.testng.annotations.Test; @@ -39,7 +39,7 @@ public AspectUtilsTest() throws EntityRegistryException { @Test public void testAdditionalChanges() { - EbeanServer server = EbeanTestUtils.createTestServer(); + Database server = EbeanTestUtils.createTestServer(); EbeanAspectDao aspectDao = new EbeanAspectDao(server); aspectDao.setConnectionValidated(true); EventProducer mockProducer = mock(EventProducer.class); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/ESTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/ESTestUtils.java index 45c4c16864b07..7e9605cbe3db0 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/ESTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/ESTestUtils.java @@ -34,8 +34,9 @@ import org.testcontainers.elasticsearch.ElasticsearchContainer; import org.testcontainers.utility.DockerImageName; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static com.linkedin.metadata.DockerTestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; public class ESTestUtils { private ESTestUtils() { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java index d8d7efeff87d4..180166e963fca 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java @@ -1,7 +1,7 @@ package com.linkedin.metadata; -import io.ebean.EbeanServer; -import io.ebean.EbeanServerFactory; +import io.ebean.Database; +import io.ebean.DatabaseFactory; import io.ebean.config.ServerConfig; import io.ebean.datasource.DataSourceConfig; @@ -13,8 +13,8 @@ private EbeanTestUtils() { } @Nonnull - public static EbeanServer createTestServer() { - return EbeanServerFactory.create(createTestingH2ServerConfig()); + public static Database createTestServer() { + return DatabaseFactory.create(createTestingH2ServerConfig()); } @Nonnull 
@@ -22,7 +22,7 @@ private static ServerConfig createTestingH2ServerConfig() { DataSourceConfig dataSourceConfig = new DataSourceConfig(); dataSourceConfig.setUsername("tester"); dataSourceConfig.setPassword(""); - dataSourceConfig.setUrl("jdbc:h2:mem:;IGNORECASE=TRUE;"); + dataSourceConfig.setUrl("jdbc:h2:mem:test;IGNORECASE=TRUE;mode=mysql;"); dataSourceConfig.setDriver("org.h2.Driver"); ServerConfig serverConfig = new ServerConfig(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java index 62f8827b574b8..9e453e6e75677 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java @@ -7,7 +7,7 @@ import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.registry.EntityRegistryException; import com.linkedin.metadata.service.UpdateIndicesService; -import io.ebean.EbeanServer; +import io.ebean.Database; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -22,7 +22,7 @@ public EbeanAspectMigrationsDaoTest() throws EntityRegistryException { @BeforeMethod public void setupTest() { - EbeanServer server = EbeanTestUtils.createTestServer(); + Database server = EbeanTestUtils.createTestServer(); _mockProducer = mock(EventProducer.class); EbeanAspectDao dao = new EbeanAspectDao(server); dao.setConnectionValidated(true); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java index 9126aad62895d..90f9baa4ca4c2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity; +import com.linkedin.common.AuditStamp; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,21 +12,36 @@ import com.linkedin.metadata.EbeanTestUtils; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.key.CorpUserKey; import com.linkedin.metadata.models.registry.EntityRegistryException; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.metadata.utils.PegasusUtils; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import io.ebean.EbeanServer; +import io.datahub.test.DataGenerator; +import io.ebean.Database; import io.ebean.Transaction; import io.ebean.TxScope; import io.ebean.annotation.TxIsolation; +import org.apache.commons.lang3.tuple.Triple; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; +import java.net.URISyntaxException; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import 
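Note: the new H2 URL names the in-memory database (so it is shared across connections within a test) and runs it in MySQL compatibility mode, keeping test SQL closer to production MySQL. The full test-server recipe restated standalone; the DDL flags are an assumption added to make the sketch runnable and are not shown in this hunk.

    import io.ebean.Database;
    import io.ebean.DatabaseFactory;
    import io.ebean.config.ServerConfig;
    import io.ebean.datasource.DataSourceConfig;

    final class H2TestDb {
        /** In-memory H2 pretending to be MySQL, so tests exercise production-like SQL. */
        static Database create() {
            DataSourceConfig ds = new DataSourceConfig();
            ds.setUsername("tester");
            ds.setPassword("");
            ds.setUrl("jdbc:h2:mem:test;IGNORECASE=TRUE;mode=mysql;");
            ds.setDriver("org.h2.Driver");

            ServerConfig cfg = new ServerConfig();
            cfg.setName("test");
            cfg.setDataSourceConfig(ds);
            cfg.setDdlGenerate(true);  // assumed: generate schema from the entity beans
            cfg.setDdlRun(true);       // assumed
            return DatabaseFactory.create(cfg);
        }
    }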
java.util.concurrent.LinkedBlockingQueue; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + import static org.mockito.Mockito.mock; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; @@ -44,10 +61,10 @@ public EbeanEntityServiceTest() throws EntityRegistryException { @BeforeMethod public void setupTest() { - EbeanServer server = EbeanTestUtils.createTestServer(); + Database server = EbeanTestUtils.createTestServer(); _mockProducer = mock(EventProducer.class); _aspectDao = new EbeanAspectDao(server); - _aspectDao.setConnectionValidated(true); + _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); @@ -86,15 +103,34 @@ public void testIngestListLatestAspects() throws AssertionError { // Ingest CorpUserInfo Aspect #1 CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #2 CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - _entityServiceImpl.ingestAspect(entityUrn2, aspectName, writeAspect2, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #3 CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - _entityServiceImpl.ingestAspect(entityUrn3, aspectName, writeAspect3, TEST_AUDIT_STAMP, metadata1); + + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects ListResult batch1 = _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 0, 2); @@ -131,15 +167,34 @@ public void testIngestListUrns() throws AssertionError { // Ingest CorpUserInfo Aspect #1 RecordTemplate writeAspect1 = AspectGenerationUtils.createCorpUserKey(entityUrn1); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #2 RecordTemplate writeAspect2 = AspectGenerationUtils.createCorpUserKey(entityUrn2); - _entityServiceImpl.ingestAspect(entityUrn2, aspectName, writeAspect2, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #3 RecordTemplate writeAspect3 = AspectGenerationUtils.createCorpUserKey(entityUrn3); - _entityServiceImpl.ingestAspect(entityUrn3, aspectName, writeAspect3, TEST_AUDIT_STAMP, metadata1); + + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry) + ); + 
_entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects urns ListUrnsResult batch1 = _entityServiceImpl.listUrns(entityUrn1.getEntityType(), 0, 2); @@ -163,7 +218,7 @@ public void testIngestListUrns() throws AssertionError { @Override @Test public void testNestedTransactions() throws AssertionError { - EbeanServer server = _aspectDao.getServer(); + Database server = _aspectDao.getServer(); try (Transaction transaction = server.beginTransaction(TxScope.requiresNew() .setIsolation(TxIsolation.REPEATABLE_READ))) { @@ -183,4 +238,178 @@ public void testNestedTransactions() throws AssertionError { } System.out.println("done"); } + + @Test + public void dataGeneratorThreadingTest() { + DataGenerator dataGenerator = new DataGenerator(_entityServiceImpl); + List aspects = List.of("status", "globalTags", "glossaryTerms"); + List> testData = dataGenerator.generateMCPs("dataset", 25, aspects) + .collect(Collectors.toList()); + + // Expected no duplicates aspects + List duplicates = testData.stream() + .flatMap(Collection::stream) + .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) + .collect(Collectors.groupingBy(Triple::toString)) + .entrySet().stream() + .filter(e -> e.getValue().size() > 1) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); + assertEquals(duplicates.size(), 0, duplicates.toString()); + } + + /** + * This test is designed to detect multi-threading persistence exceptions like duplicate key, + * exceptions that exceed retry limits or unnecessary versions. + */ + @Test + public void multiThreadingTest() { + DataGenerator dataGenerator = new DataGenerator(_entityServiceImpl); + Database server = ((EbeanAspectDao) _entityServiceImpl._aspectDao).getServer(); + + // Add data + List aspects = List.of("status", "globalTags", "glossaryTerms"); + List> testData = dataGenerator.generateMCPs("dataset", 25, aspects) + .collect(Collectors.toList()); + + executeThreadingTest(_entityServiceImpl, testData, 15); + + // Expected aspects + Set> generatedAspectIds = testData.stream() + .flatMap(Collection::stream) + .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) + .collect(Collectors.toSet()); + + // Actual inserts + Set> actualAspectIds = server.sqlQuery( + "select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + .collect(Collectors.toSet()); + + // Assert State + Set> additions = actualAspectIds.stream() + .filter(id -> !generatedAspectIds.contains(id)) + .collect(Collectors.toSet()); + assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + + Set> missing = generatedAspectIds.stream() + .filter(id -> !actualAspectIds.contains(id)) + .collect(Collectors.toSet()); + assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + } + + /** + * Don't blame multi-threading for what might not be a threading issue. + * Perform the multi-threading test with 1 thread. 
+ */ + @Test + public void singleThreadingTest() { + DataGenerator dataGenerator = new DataGenerator(_entityServiceImpl); + Database server = ((EbeanAspectDao) _entityServiceImpl._aspectDao).getServer(); + + // Add data + List aspects = List.of("status", "globalTags", "glossaryTerms"); + List> testData = dataGenerator.generateMCPs("dataset", 25, aspects) + .collect(Collectors.toList()); + + executeThreadingTest(_entityServiceImpl, testData, 1); + + // Expected aspects + Set> generatedAspectIds = testData.stream() + .flatMap(Collection::stream) + .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) + .collect(Collectors.toSet()); + + // Actual inserts + Set> actualAspectIds = server.sqlQuery( + "select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + .collect(Collectors.toSet()); + + // Assert State + Set> additions = actualAspectIds.stream() + .filter(id -> !generatedAspectIds.contains(id)) + .collect(Collectors.toSet()); + assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + + Set> missing = generatedAspectIds.stream() + .filter(id -> !actualAspectIds.contains(id)) + .collect(Collectors.toSet()); + assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + } + + private static void executeThreadingTest(EntityServiceImpl entityService, List> testData, + int threadCount) { + Database server = ((EbeanAspectDao) entityService._aspectDao).getServer(); + server.sqlUpdate("truncate metadata_aspect_v2"); + + int count = Objects.requireNonNull(server.sqlQuery( + "select count(*) as cnt from metadata_aspect_v2").findOne()).getInteger("cnt"); + assertEquals(count, 0, "Expected exactly 0 rows at the start."); + + // Create ingest proposals in parallel, mimic the smoke-test ingestion + final LinkedBlockingQueue> queue = new LinkedBlockingQueue<>(threadCount * 2); + + // Spin up workers + List writeThreads = IntStream.range(0, threadCount) + .mapToObj(threadId -> new Thread(new MultiThreadTestWorker(queue, entityService))) + .collect(Collectors.toList()); + writeThreads.forEach(Thread::start); + + testData.forEach(mcps -> { + try { + queue.put(mcps); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + // Terminate workers with empty mcp + IntStream.range(0, threadCount).forEach(threadId -> { + try { + queue.put(List.of()); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + // Wait for threads to finish + writeThreads.forEach(thread -> { + try { + thread.join(10000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + } + + private static class MultiThreadTestWorker implements Runnable { + private final EntityServiceImpl entityService; + private final LinkedBlockingQueue> queue; + + public MultiThreadTestWorker(LinkedBlockingQueue> queue, EntityServiceImpl entityService) { + this.queue = queue; + this.entityService = entityService; + } + + public void run() { + try { + while (true) { + List mcps = queue.take(); + if (mcps.isEmpty()) { + break; + } + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)); + auditStamp.setTime(System.currentTimeMillis()); + AspectsBatchImpl batch = AspectsBatchImpl.builder() + .mcps(mcps, entityService.getEntityRegistry()) + .build(); + 
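executeThreadingTest above coordinates shutdown with a poison-pill convention: each worker exits when it takes an empty list off the shared queue, so the driver enqueues exactly one empty list per thread. A self-contained sketch of that protocol, with work items simplified to String:

    LinkedBlockingQueue<List<String>> queue = new LinkedBlockingQueue<>(2 * threadCount);
    Runnable worker = () -> {
        try {
            while (true) {
                List<String> work = queue.take();   // blocks until work arrives
                if (work.isEmpty()) {
                    break;                          // poison pill: one per worker
                }
                // ... process the batch ...
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();     // preserve interrupt status
        }
    };

The bounded queue (twice the thread count, as above) applies backpressure so the driver cannot outrun the workers.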
entityService.ingestProposal(batch, auditStamp, false); + } + } catch (InterruptedException | URISyntaxException ie) { + throw new RuntimeException(ie); + } + } + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java index d485981f32a07..c0d2a3783c0a7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -32,6 +32,8 @@ import com.linkedin.metadata.aspect.CorpUserAspect; import com.linkedin.metadata.aspect.CorpUserAspectArray; import com.linkedin.metadata.aspect.VersionedAspect; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.key.CorpUserKey; @@ -46,7 +48,6 @@ import com.linkedin.metadata.snapshot.Snapshot; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.GenericAspect; -import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; @@ -146,9 +147,6 @@ public void testIngestGetEntity() throws Exception { assertNull(mcl.getPreviousSystemMetadata()); assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verifyNoMoreInteractions(_mockProducer); } @@ -183,9 +181,6 @@ public void testAddKey() throws Exception { assertNull(mcl.getPreviousSystemMetadata()); assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verifyNoMoreInteractions(_mockProducer); } @@ -254,12 +249,6 @@ public void testIngestGetEntities() throws Exception { assertNull(mcl.getPreviousSystemMetadata()); assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn1), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn2), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verifyNoMoreInteractions(_mockProducer); } @@ -314,12 +303,6 @@ public void testIngestGetEntitiesV2() throws Exception { EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn1), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn2), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), 
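A recurring deletion in the next file: every produceMetadataAuditEvent verification is dropped, since MAE emission is removed from this write path, while the MetadataChangeLog expectations remain. The surviving verification pattern, sketched with the same Mockito calls the tests use (mockProducer and entityUrn are placeholders):

    verify(mockProducer, times(2))
        .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any());
    verifyNoMoreInteractions(mockProducer);   // would fail if a stray MAE were still produced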
Mockito.any()); @@ -382,12 +365,6 @@ public void testIngestGetEntitiesVersionedV2() throws Exception { EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn1), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn2), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), Mockito.any()); @@ -413,7 +390,6 @@ public void testIngestAspectsGetLatestAspects() throws Exception { pairToIngest.add(getAspectRecordPair(writeAspect2, CorpUserInfo.class)); SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); Map latestAspects = _entityServiceImpl.getLatestAspectsForUrn( @@ -425,8 +401,6 @@ public void testIngestAspectsGetLatestAspects() throws Exception { verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); - verify(_mockProducer, times(2)).produceMetadataAuditEvent(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); verifyNoMoreInteractions(_mockProducer); } @@ -444,7 +418,6 @@ public void testReingestAspectsGetLatestAspects() throws Exception { pairToIngest.add(getAspectRecordPair(writeAspect1, CorpUserInfo.class)); SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); @@ -478,8 +451,6 @@ public void testReingestAspectsGetLatestAspects() throws Exception { verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared clearInvocations(_mockProducer); @@ -488,8 +459,6 @@ public void testReingestAspectsGetLatestAspects() throws Exception { verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); verifyNoMoreInteractions(_mockProducer); @@ -541,8 +510,6 @@ public void testReingestLineageAspect() throws Exception { verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared clearInvocations(_mockProducer); @@ -551,8 +518,6 @@ public void testReingestLineageAspect() throws Exception { verify(_mockProducer, 
times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); verifyNoMoreInteractions(_mockProducer); @@ -704,9 +669,9 @@ public void testUpdateGetAspect() throws AssertionError { CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.updateAspect(entityUrn, "corpuser", aspectName, corpUserInfoSpec, writeAspect, TEST_AUDIT_STAMP, 1, - true); - RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 1); + _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect1)); verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); @@ -715,10 +680,13 @@ public void testUpdateGetAspect() throws AssertionError { writeAspect.setEmail("newemail@test.com"); // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.updateAspect(entityUrn, "corpuser", aspectName, corpUserInfoSpec, writeAspect, TEST_AUDIT_STAMP, 1, - false); - RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 1); + _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect2)); + verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), + Mockito.any()); + verifyNoMoreInteractions(_mockProducer); } @@ -731,26 +699,39 @@ public void testGetAspectAtVersion() throws AssertionError { AspectSpec corpUserInfoSpec = _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.updateAspect(entityUrn, "corpuser", aspectName, corpUserInfoSpec, writeAspect, TEST_AUDIT_STAMP, 1, - true); + _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect1)), TEST_AUDIT_STAMP, null); - VersionedAspect writtenVersionedAspect = new VersionedAspect(); - writtenVersionedAspect.setAspect(Aspect.create(writeAspect)); - writtenVersionedAspect.setVersion(1); + VersionedAspect writtenVersionedAspect1 = new VersionedAspect(); + writtenVersionedAspect1.setAspect(Aspect.create(writeAspect1)); + writtenVersionedAspect1.setVersion(0); - VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 1); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect, readAspect1)); + VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), 
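The rewritten testUpdateGetAspect above reads back version 0 instead of version 1: in this store the latest value of an aspect lives at version 0, and historical values are numbered from 1 upward in write order (the DAO reads later in this file assert the same layout). A compressed sketch of that contract, with placeholder names:

    entityService.ingestAspects(urn, List.of(Pair.of(aspectName, first)), auditStamp, null);
    entityService.ingestAspects(urn, List.of(Pair.of(aspectName, second)), auditStamp, null);
    RecordTemplate latest = entityService.getAspect(urn, aspectName, 0);  // the second write
    RecordTemplate oldest = entityService.getAspect(urn, aspectName, 1);  // the first write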
Mockito.any()); - VersionedAspect readAspect2 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect, readAspect2)); + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + + // Validate retrieval of CorpUserInfo Aspect #2 + _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect2)), TEST_AUDIT_STAMP, null); + + VersionedAspect writtenVersionedAspect2 = new VersionedAspect(); + writtenVersionedAspect2.setAspect(Aspect.create(writeAspect2)); + writtenVersionedAspect2.setVersion(0); + + VersionedAspect readAspectVersion2 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspectVersion2)); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect2, readAspectVersion2)); + verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), + Mockito.any()); - VersionedAspect readAspectVersion0 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); - assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect, readAspectVersion0)); + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); verifyNoMoreInteractions(_mockProducer); } @@ -768,19 +749,43 @@ public void testRollbackAspect() throws AssertionError { // Ingest CorpUserInfo Aspect #1 CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #2 CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - _entityServiceImpl.ingestAspect(entityUrn2, aspectName, writeAspect2, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #3 CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - _entityServiceImpl.ingestAspect(entityUrn3, aspectName, writeAspect3, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #1 Overwrite CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1Overwrite, TEST_AUDIT_STAMP, metadata2); + + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // this should no-op since this run has been overwritten AspectRowSummary rollbackOverwrittenAspect = new AspectRowSummary(); @@ -822,14 +827,33 @@ public void testRollbackKey() throws AssertionError { // Ingest CorpUserInfo Aspect #1 CorpUserInfo writeAspect1 
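As exercised in testGetAspectAtVersion above, getVersionedAspect resolves both 0 and -1 to the latest version, and the returned VersionedAspect reports version 0. The comparison idiom in isolation (writeAspect is a placeholder payload):

    VersionedAspect expected = new VersionedAspect();
    expected.setAspect(Aspect.create(writeAspect));
    expected.setVersion(0);                    // latest is always reported as version 0
    VersionedAspect latest = entityService.getVersionedAspect(urn, aspectName, -1);
    assertTrue(DataTemplateUtil.areEqual(expected, latest));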
= AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); - RecordTemplate writeKey1 = _entityServiceImpl.buildKeyAspect(entityUrn1); - _entityServiceImpl.ingestAspect(entityUrn1, keyAspectName, writeKey1, TEST_AUDIT_STAMP, metadata1); + RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); // Ingest CorpUserInfo Aspect #1 Overwrite CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1Overwrite, TEST_AUDIT_STAMP, metadata2); + + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(keyAspectName) + .aspect(writeKey1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // this should no-op since the key should have been written in the first run AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); @@ -873,22 +897,51 @@ public void testRollbackUrn() throws AssertionError { // Ingest CorpUserInfo Aspect #1 CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); - RecordTemplate writeKey1 = _entityServiceImpl.buildKeyAspect(entityUrn1); - _entityServiceImpl.ingestAspect(entityUrn1, keyAspectName, writeKey1, TEST_AUDIT_STAMP, metadata1); + RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); // Ingest CorpUserInfo Aspect #2 CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - _entityServiceImpl.ingestAspect(entityUrn2, aspectName, writeAspect2, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #3 CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - _entityServiceImpl.ingestAspect(entityUrn3, aspectName, writeAspect3, TEST_AUDIT_STAMP, metadata1); // Ingest CorpUserInfo Aspect #1 Overwrite CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - _entityServiceImpl.ingestAspect(entityUrn1, aspectName, writeAspect1Overwrite, TEST_AUDIT_STAMP, metadata2); + + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(keyAspectName) + .aspect(writeKey1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + 
.build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // this should no-op since the key should have been written in the first run AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); @@ -918,8 +971,17 @@ public void testIngestGetLatestAspect() throws AssertionError { SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); @@ -931,9 +993,6 @@ public void testIngestGetLatestAspect() throws AssertionError { assertNull(mcl.getPreviousSystemMetadata()); assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.any(), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verifyNoMoreInteractions(_mockProducer); reset(_mockProducer); @@ -941,8 +1000,17 @@ public void testIngestGetLatestAspect() throws AssertionError { // Ingest CorpUserInfo Aspect #2 CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect2, TEST_AUDIT_STAMP, metadata2); RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); @@ -958,9 +1026,6 @@ public void testIngestGetLatestAspect() throws AssertionError { assertNotNull(mcl.getPreviousSystemMetadata()); assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.notNull(), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verifyNoMoreInteractions(_mockProducer); } @@ -975,16 +1040,34 @@ public void testIngestGetLatestEnvelopedAspect() throws Exception { SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + // Validate retrieval 
of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); EnvelopedAspect readAspect1 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); assertTrue(DataTemplateUtil.areEqual(writeAspect1, new CorpUserInfo(readAspect1.getValue().data()))); // Ingest CorpUserInfo Aspect #2 CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect2, TEST_AUDIT_STAMP, metadata2); EnvelopedAspect readAspect2 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); @@ -993,12 +1076,6 @@ public void testIngestGetLatestEnvelopedAspect() throws Exception { assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.notNull(), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); @@ -1017,8 +1094,17 @@ public void testIngestSameAspect() throws AssertionError { SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); SystemMetadata metadata3 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-123"); + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); @@ -1030,9 +1116,6 @@ public void testIngestSameAspect() throws AssertionError { assertNull(mcl.getPreviousSystemMetadata()); assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.eq(null), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verifyNoMoreInteractions(_mockProducer); reset(_mockProducer); @@ -1040,8 +1123,17 @@ public void testIngestSameAspect() throws AssertionError { // Ingest CorpUserInfo Aspect #2 CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + items = List.of( 
+ UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect2, TEST_AUDIT_STAMP, metadata2); RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, ASPECT_LATEST_VERSION); @@ -1053,9 +1145,6 @@ public void testIngestSameAspect() throws AssertionError { verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - verify(_mockProducer, times(1)).produceMetadataAuditEvent(Mockito.eq(entityUrn), Mockito.notNull(), Mockito.any(), - Mockito.any(), Mockito.any(), Mockito.eq(MetadataAuditOperation.UPDATE)); - verifyNoMoreInteractions(_mockProducer); } @@ -1069,20 +1158,54 @@ public void testRetention() throws AssertionError { // Ingest CorpUserInfo Aspect CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect1a, TEST_AUDIT_STAMP, metadata1); CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect1b, TEST_AUDIT_STAMP, metadata1); String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); // Ingest Status Aspect Status writeAspect2 = new Status().setRemoved(true); - _entityServiceImpl.ingestAspect(entityUrn, aspectName2, writeAspect2, TEST_AUDIT_STAMP, metadata1); Status writeAspect2a = new Status().setRemoved(false); - _entityServiceImpl.ingestAspect(entityUrn, aspectName2, writeAspect2a, TEST_AUDIT_STAMP, metadata1); Status writeAspect2b = new Status().setRemoved(true); - _entityServiceImpl.ingestAspect(entityUrn, aspectName2, writeAspect2b, TEST_AUDIT_STAMP, metadata1); + + List items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1b) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2b) + .systemMetadata(metadata1) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), 
writeAspect2); @@ -1094,10 +1217,24 @@ public void testRetention() throws AssertionError { // Ingest CorpUserInfo Aspect again CorpUserInfo writeAspect1c = AspectGenerationUtils.createCorpUserInfo("email_c@test.com"); - _entityServiceImpl.ingestAspect(entityUrn, aspectName, writeAspect1c, TEST_AUDIT_STAMP, metadata1); // Ingest Status Aspect again Status writeAspect2c = new Status().setRemoved(false); - _entityServiceImpl.ingestAspect(entityUrn, aspectName2, writeAspect2c, TEST_AUDIT_STAMP, metadata1); + + items = List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1c) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2c) + .systemMetadata(metadata1) + .build(_testEntityRegistry) + ); + _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); @@ -1203,12 +1340,12 @@ public void testRestoreIndices() throws Exception { public void testValidateUrn() throws Exception { // Valid URN Urn validTestUrn = new Urn("li", "corpuser", new TupleKey("testKey")); - _entityServiceImpl.validateUrn(validTestUrn); + EntityUtils.validateUrn(_testEntityRegistry, validTestUrn); // URN with trailing whitespace Urn testUrnWithTrailingWhitespace = new Urn("li", "corpuser", new TupleKey("testKey ")); try { - _entityServiceImpl.validateUrn(testUrnWithTrailingWhitespace); + EntityUtils.validateUrn(_testEntityRegistry, testUrnWithTrailingWhitespace); Assert.fail("Should have raised IllegalArgumentException for URN with trailing whitespace"); } catch (IllegalArgumentException e) { assertEquals(e.getMessage(), "Error: cannot provide an URN with leading or trailing whitespace"); @@ -1219,7 +1356,7 @@ public void testValidateUrn() throws Exception { Urn testUrnTooLong = new Urn("li", "corpuser", new TupleKey(stringTooLong)); try { - _entityServiceImpl.validateUrn(testUrnTooLong); + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLong); Assert.fail("Should have raised IllegalArgumentException for URN too long"); } catch (IllegalArgumentException e) { assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); @@ -1235,9 +1372,9 @@ public void testValidateUrn() throws Exception { Urn testUrnTooLongWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringTooLongWhenEncoded.toString())); Urn testUrnSameLengthWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringSameLengthWhenEncoded.toString())); // Same length when encoded should be allowed, the encoded one should not be - _entityServiceImpl.validateUrn(testUrnSameLengthWhenEncoded); + EntityUtils.validateUrn(_testEntityRegistry, testUrnSameLengthWhenEncoded); try { - _entityServiceImpl.validateUrn(testUrnTooLongWhenEncoded); + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLongWhenEncoded); Assert.fail("Should have raised IllegalArgumentException for URN too long"); } catch (IllegalArgumentException e) { assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); @@ -1246,9 +1383,9 @@ public void testValidateUrn() throws Exception { // Urn containing disallowed character Urn testUrnSpecialCharValid = new Urn("li", "corpUser", new TupleKey("bob␇")); Urn testUrnSpecialCharInvalid = new Urn("li", 
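URN validation moves off the entity service onto EntityUtils and now takes the registry explicitly, as the hunks above show. A sketch of the new call shape and its failure mode, reusing a case and message asserted in this test:

    Urn candidate = new Urn("li", "corpuser", new TupleKey("testKey "));  // trailing space
    try {
        EntityUtils.validateUrn(entityRegistry, candidate);
        Assert.fail("expected rejection");
    } catch (IllegalArgumentException e) {
        // "Error: cannot provide an URN with leading or trailing whitespace"
    }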
"corpUser", new TupleKey("bob␟")); - _entityServiceImpl.validateUrn(testUrnSpecialCharValid); + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharValid); try { - _entityServiceImpl.validateUrn(testUrnSpecialCharInvalid); + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharInvalid); Assert.fail("Should have raised IllegalArgumentException for URN containing the illegal char"); } catch (IllegalArgumentException e) { assertEquals(e.getMessage(), "Error: URN cannot contain ␟ character"); @@ -1256,7 +1393,7 @@ public void testValidateUrn() throws Exception { Urn urnWithMismatchedParens = new Urn("li", "corpuser", new TupleKey("test(Key")); try { - _entityServiceImpl.validateUrn(urnWithMismatchedParens); + EntityUtils.validateUrn(_testEntityRegistry, urnWithMismatchedParens); Assert.fail("Should have raised IllegalArgumentException for URN with mismatched parens"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("mismatched paren nesting")); @@ -1264,18 +1401,18 @@ public void testValidateUrn() throws Exception { Urn invalidType = new Urn("li", "fakeMadeUpType", new TupleKey("testKey")); try { - _entityServiceImpl.validateUrn(invalidType); + EntityUtils.validateUrn(_testEntityRegistry, invalidType); Assert.fail("Should have raised IllegalArgumentException for URN with non-existent entity type"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Failed to find entity with name fakeMadeUpType")); } Urn validFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD")); - _entityServiceImpl.validateUrn(validFabricType); + EntityUtils.validateUrn(_testEntityRegistry, validFabricType); Urn invalidFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "prod")); try { - _entityServiceImpl.validateUrn(invalidFabricType); + EntityUtils.validateUrn(_testEntityRegistry, invalidFabricType); Assert.fail("Should have raised IllegalArgumentException for URN with invalid fabric type"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains(invalidFabricType.toString())); @@ -1283,7 +1420,7 @@ public void testValidateUrn() throws Exception { Urn urnEndingInComma = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD", "")); try { - _entityServiceImpl.validateUrn(urnEndingInComma); + EntityUtils.validateUrn(_testEntityRegistry, urnEndingInComma); Assert.fail("Should have raised IllegalArgumentException for URN ending in comma"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains(urnEndingInComma.toString())); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java index b431f786cd50a..2703dd7fe6cbe 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java @@ -6,7 +6,7 @@ import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.registry.EntityRegistryException; -import io.ebean.EbeanServer; +import io.ebean.Database; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -27,7 +27,7 @@ public EbeanTimelineServiceTest() throws EntityRegistryException { @BeforeMethod public void setupTest() { - EbeanServer server 
= EbeanTestUtils.createTestServer(); + Database server = EbeanTestUtils.createTestServer(); _aspectDao = new EbeanAspectDao(server); _aspectDao.setConnectionValidated(true); _entityTimelineService = new TimelineServiceImpl(_aspectDao, _testEntityRegistry); diff --git a/metadata-io/src/test/java/io/datahub/test/DataGenerator.java b/metadata-io/src/test/java/io/datahub/test/DataGenerator.java new file mode 100644 index 0000000000000..3b374993cde16 --- /dev/null +++ b/metadata-io/src/test/java/io/datahub/test/DataGenerator.java @@ -0,0 +1,359 @@ +package io.datahub.test; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.GlossaryTermAssociation; +import com.linkedin.common.GlossaryTermAssociationArray; +import com.linkedin.common.TagAssociation; +import com.linkedin.common.TagAssociationArray; +import com.linkedin.common.urn.GlossaryTermUrn; +import com.linkedin.common.urn.TagUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import net.datafaker.Faker; +import com.linkedin.mxe.MetadataChangeProposal; +import net.datafaker.providers.base.Animal; +import net.datafaker.providers.base.Cat; +import org.apache.commons.lang3.NotImplementedException; + +import javax.annotation.Nonnull; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.net.URISyntaxException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.function.BiFunction; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.LongStream; +import java.util.stream.Stream; + +public class DataGenerator { + private final static Faker FAKER = new Faker(); + private final EntityRegistry entityRegistry; + private final EntityService entityService; + + public DataGenerator(EntityService entityService) { + this.entityService = entityService; + this.entityRegistry = entityService.getEntityRegistry(); + } + + public Stream> generateDatasets() { + return generateMCPs("dataset", 10, List.of()); + } + + public Stream> generateMCPs(String entityName, long count, List aspects) { + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + // Prevent duplicate tags and terms generated as secondary entities + Set secondaryUrns = new HashSet<>(); + + return LongStream.range(0, count).mapToObj(idx -> { + RecordTemplate key = randomKeyAspect(entitySpec); + MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityType(entitySpec.getName()); + mcp.setAspectName(entitySpec.getKeyAspectName()); + mcp.setAspect(GenericRecordUtils.serializeAspect(key)); + mcp.setEntityUrn(EntityKeyUtils.convertEntityKeyToUrn(key, entityName)); + mcp.setChangeType(ChangeType.UPSERT); + return mcp; + }).flatMap(mcp -> { + // Expand with additional random aspects + List additionalMCPs = new LinkedList<>(); + + for (String aspectName : aspects) { + AspectSpec aspectSpec 
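The new test-only DataGenerator above is what the threading tests earlier in this patch consume. A usage sketch matching those call sites, where each inner list is the group of MCPs (key aspect plus extras) for one generated entity:

    DataGenerator dataGenerator = new DataGenerator(entityService);
    List<List<MetadataChangeProposal>> testData =
        dataGenerator.generateMCPs("dataset", 25, List.of("status", "globalTags", "glossaryTerms"))
            .collect(Collectors.toList());   // 25 datasets, each with key + three aspects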
= entitySpec.getAspectSpec(aspectName); + if (aspectSpec == null) { + throw new IllegalStateException("Aspect " + aspectName + " not found for entity " + entityName); + } + + RecordTemplate aspect = randomAspectGenerators.getOrDefault(aspectName, + DataGenerator::defaultRandomAspect).apply(entitySpec, aspectSpec); + + // Maybe generate nested entities at the same time, like globalTags/glossaryTerms + List secondaryEntities = nestedRandomAspectGenerators.getOrDefault(aspectSpec.getName(), + (a, c) -> List.of()).apply(aspect, 5).stream() + .filter(secondaryMCP -> { + if (!secondaryUrns.contains(secondaryMCP.getEntityUrn())) { + secondaryUrns.add(secondaryMCP.getEntityUrn()); + return true; + } + return false; + }) + .collect(Collectors.toList()); + additionalMCPs.addAll(secondaryEntities); + + MetadataChangeProposal additionalMCP = new MetadataChangeProposal(); + additionalMCP.setEntityType(entitySpec.getName()); + additionalMCP.setAspectName(aspectName); + additionalMCP.setAspect(GenericRecordUtils.serializeAspect(aspect)); + additionalMCP.setEntityUrn(mcp.getEntityUrn()); + additionalMCP.setChangeType(ChangeType.UPSERT); + + additionalMCPs.add(additionalMCP); + } + + return Stream.concat(Stream.of(mcp), additionalMCPs.stream()); + }).map(mcp -> { + // Expand with default aspects per normal + return Stream.concat(Stream.of(mcp), + AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()).collect(Collectors.toList()); + }); + } + + public static Map> randomAspectGenerators = Map.of( + "glossaryTermInfo", (e, a) -> { + GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) defaultRandomAspect(e, a); + glossaryTermInfo.setName(normalize(FAKER.company().buzzword())); + return glossaryTermInfo; + } + ); + + public Map>> nestedRandomAspectGenerators = Map.of( + "globalTags", (aspect, count) -> { + try { + List tags = generateMCPs("tag", count, List.of()) + .map(mcps -> mcps.get(0)) + .collect(Collectors.toList()); + Method setTagsMethod = aspect.getClass().getMethod("setTags", TagAssociationArray.class); + TagAssociationArray tagAssociations = new TagAssociationArray(); + tagAssociations.addAll(tags.stream().map( + tagMCP -> { + try { + return new TagAssociation().setTag(TagUrn.createFromUrn(tagMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + ).collect(Collectors.toList())); + setTagsMethod.invoke(aspect, tagAssociations); + return tags; + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "glossaryTerms", (aspect, count) -> { + try { + List terms = generateMCPs("glossaryTerm", count, + List.of("glossaryTermInfo")) + .map(mcps -> mcps.get(0)) + .collect(Collectors.toList()); + Method setTermsMethod = aspect.getClass().getMethod("setTerms", GlossaryTermAssociationArray.class); + GlossaryTermAssociationArray termAssociations = new GlossaryTermAssociationArray(); + termAssociations.addAll(terms.stream().map( + termMCP -> { + try { + return new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(termMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + ).collect(Collectors.toList())); + setTermsMethod.invoke(aspect, termAssociations); + return terms; + } catch (Exception e) { + throw new RuntimeException(e); + } + } + ); + + private static RecordTemplate defaultRandomAspect(@Nonnull EntitySpec entitySpec, @Nonnull AspectSpec aspectSpec) { + Class aspectClass = aspectSpec.getDataTemplateClass(); + try { + Object aspect = 
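The secondary-entity filter above (a contains check followed by add on secondaryUrns) is the usual seen-set dedup; Set.add alone expresses it, since add returns false when the element is already present. A sketch, assuming a stream of candidate MCPs:

    Set<Urn> seen = new HashSet<>();
    List<MetadataChangeProposal> unique = candidates.stream()
        .filter(mcp -> seen.add(mcp.getEntityUrn()))   // false -> duplicate, dropped
        .collect(Collectors.toList());

This keeps the generated tags and glossary terms unique across all generated datasets, which is what dataGeneratorThreadingTest later checks for.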
aspectClass.getDeclaredConstructor().newInstance(); + + List booleanMethods = Arrays.stream(aspectClass.getMethods()) + .filter(m -> m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Boolean.class) + .collect(Collectors.toList()); + + for (Method boolMethod : booleanMethods) { + boolMethod.invoke(aspect, FAKER.random().nextBoolean()); + } + + List stringMethods = Arrays.stream(aspectClass.getMethods()) + .filter(m -> m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + for (Method stringMethod : stringMethods) { + String value; + switch (aspectSpec.getName() + "_" + stringMethod.getName()) { + default: + value = FAKER.lorem().characters(8, 16, false); + break; + } + + // global + if (stringMethod.getName().toLowerCase().contains("description") + || stringMethod.getName().toLowerCase().contains("definition")) { + value = FAKER.lorem().paragraph(); + } + + stringMethod.invoke(aspect, value); + } + + List enumMethods = Arrays.stream(aspectClass.getMethods()) + .filter(m -> m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(aspect, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + // auditStamp + Arrays.stream(aspectClass.getMethods()) + .filter(m -> m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == AuditStamp.class) + .findFirst().ifPresent(auditStampMethod -> { + try { + AuditStamp auditStamp = new AuditStamp() + .setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)) + .setTime(System.currentTimeMillis()); + auditStampMethod.invoke(aspect, auditStamp); + } catch (URISyntaxException | IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); + } + }); + + return aspectClass.cast(aspect); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private static RecordTemplate randomKeyAspect(EntitySpec entitySpec) { + Class keyClass = entitySpec.getKeyAspectSpec().getDataTemplateClass(); + try { + Object key = keyClass.getDeclaredConstructor().newInstance(); + + List stringMethods = Arrays.stream(keyClass.getMethods()) + .filter(m -> m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + switch (entitySpec.getName()) { + case "tag": + stringMethods.get(0).invoke(key, normalize(FAKER.marketing().buzzwords())); + break; + case "glossaryTerm": + stringMethods.get(0).invoke(key, normalize(UUID.randomUUID().toString())); + break; + case "container": + stringMethods.get(0).invoke(key, FAKER.examplify("b5e95fce839e7d78151ed7e0a7420d84")); + break; + default: + switch (stringMethods.size()) { + case 1: + stringMethods.get(0).invoke(key, String.join(".", multiName(3))); + break; + case 2: + Cat cat = FAKER.cat(); + stringMethods.get(0).invoke(key, cat.breed().toLowerCase()); + stringMethods.get(1).invoke(key, cat.name().toLowerCase()); + break; + default: + Animal animal = FAKER.animal(); + stringMethods.get(0).invoke(key, animal.genus().toLowerCase()); + stringMethods.get(1).invoke(key, animal.species().toLowerCase()); + stringMethods.get(2).invoke(key, animal.name().toLowerCase()); + break; + } + break; + } + + List 
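defaultRandomAspect above populates aspects purely through reflection: it enumerates single-argument set* methods by parameter type and invokes each with a generated value. The core pattern, isolated (aspectClass, aspect, and FAKER as in the class above):

    List<Method> stringSetters = Arrays.stream(aspectClass.getMethods())
        .filter(m -> m.getName().startsWith("set")
            && m.getParameterCount() == 1
            && m.getParameterTypes()[0] == String.class)
        .collect(Collectors.toList());
    for (Method setter : stringSetters) {
        setter.invoke(aspect, FAKER.lorem().characters(8, 16, false));  // random value
    }

The same filter with a different parameter type handles the Boolean, enum, and AuditStamp setters.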
urnMethods = Arrays.stream(keyClass.getMethods()) + .filter(m -> m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Urn.class) + .collect(Collectors.toList()); + + for (Method urnMethod : urnMethods) { + switch (entitySpec.getName()) { + case "dataset": + urnMethod.invoke(key, randomUrnLowerCase("dataPlatform", + List.of(randomDataPlatform()))); + break; + default: + throw new NotImplementedException(entitySpec.getName()); + } + } + + List enumMethods = Arrays.stream(keyClass.getMethods()) + .filter(m -> m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(key, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + return keyClass.cast(key); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private static List multiName(int size) { + switch (size) { + case 1: + return Stream.of(FAKER.marketing().buzzwords()) + .map(String::toLowerCase).collect(Collectors.toList()); + case 2: + Cat cat = FAKER.cat(); + return Stream.of(cat.breed(), cat.name()) + .map(String::toLowerCase).collect(Collectors.toList()); + case 3: + Animal animal = FAKER.animal(); + return Stream.of(animal.genus(), animal.species(), animal.name()) + .map(String::toLowerCase).collect(Collectors.toList()); + default: + return IntStream.range(0, size).mapToObj(i -> FAKER.expression("#{numerify 'test####'}")).collect(Collectors.toList()); + } + } + + private static Urn randomUrnLowerCase(String entityType, List tuple) { + return Urn.createFromTuple(entityType, + tuple.stream().map(DataGenerator::normalize).collect(Collectors.toList())); + } + + private static String normalize(String input) { + return input.toLowerCase().replaceAll("\\W+", "_"); + } + + private static String randomDataPlatform() { + String[] platforms = { + "ambry", "bigquery", "couchbase", "druid", "external", "feast", "glue", "hdfs", "hive", "kafka", "kusto", + "looker", "mongodb", "mssql", "mysql", "oracle", "pinot", "postgres", "presto", "redshift", "s3", + "sagemaker", "snowflake", "teradata", "voldemort" + }; + + return platforms[FAKER.random().nextInt(0, platforms.length - 1)]; + } +} diff --git a/metadata-jobs/mae-consumer-job/build.gradle b/metadata-jobs/mae-consumer-job/build.gradle index ca099eea5a8a3..51c758f434328 100644 --- a/metadata-jobs/mae-consumer-job/build.gradle +++ b/metadata-jobs/mae-consumer-job/build.gradle @@ -58,7 +58,7 @@ docker { load(true) push(false) } -tasks.getByName("docker").dependsOn([bootJar]) +tasks.getByPath(":metadata-jobs:mae-consumer-job:docker").dependsOn([bootJar]) task cleanLocalDockerImages { doLast { diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java index 72665ffa0b76e..3b44ede0f1d43 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java @@ -8,7 +8,7 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import 
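The normalize helper above lowercases its input and collapses each run of non-word characters into a single underscore, which keeps generated names safe to embed in URNs. A quick worked example of the regex:

    String normalized = "Cloud-Native Analytics!".toLowerCase().replaceAll("\\W+", "_");
    // -> "cloud_native_analytics_"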
com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService; -import io.ebean.EbeanServer; +import io.ebean.Database; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.context.annotation.Import; @@ -27,7 +27,7 @@ public class MaeConsumerApplicationTestConfiguration { private RestliEntityClient restliEntityClient; @MockBean - private EbeanServer ebeanServer; + private Database ebeanServer; @MockBean private EntityRegistry entityRegistry; diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index b72d4baff23d6..daf41a1e0303e 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ b/metadata-jobs/mce-consumer-job/build.gradle @@ -69,7 +69,7 @@ docker { load(true) push(false) } -tasks.getByName("docker").dependsOn([bootJar]) +tasks.getByPath(":metadata-jobs:mce-consumer-job:docker").dependsOn([bootJar]) task cleanLocalDockerImages { doLast { diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java index 2d09cf2043575..558a7b9d90ccb 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java @@ -11,7 +11,7 @@ import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; -import io.ebean.EbeanServer; +import io.ebean.Database; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; @@ -44,7 +44,7 @@ public RestliEntityClient restliEntityClient() { } @MockBean - public EbeanServer ebeanServer; + public Database ebeanServer; @MockBean protected TimeseriesAspectService timeseriesAspectService; diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java index e37a351e0365a..125bba7ec3280 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java @@ -12,6 +12,7 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.key.DataHubAccessTokenKey; import com.linkedin.metadata.utils.AuditStampUtils; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -19,11 +20,12 @@ import java.util.Base64; import java.util.Date; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -124,11 +126,12 @@ public String generateAccessToken(@Nonnull 
final TokenType type, @Nonnull final log.info("About to ingest access token metadata {}", proposal); final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); - // Need this to write key aspect - final List additionalChanges = AspectUtils.getAdditionalChanges(proposal, _entityService); + Stream proposalStream = Stream.concat(Stream.of(proposal), + AspectUtils.getAdditionalChanges(proposal, _entityService).stream()); - _entityService.ingestProposal(proposal, auditStamp, false); - additionalChanges.forEach(mcp -> _entityService.ingestProposal(mcp, auditStamp, false)); + _entityService.ingestProposal(AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) + .build(), auditStamp, false); return accessToken; } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java index 44673f693b555..f5ce938c411c6 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java @@ -106,6 +106,7 @@ public void testAuthenticateSuccess() throws Exception { final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec); Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); + Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java index 75a9114529d7a..1c46e864a559e 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java @@ -13,6 +13,7 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import java.util.Date; import java.util.Map; + import org.mockito.Mockito; import org.testng.annotations.Test; @@ -157,6 +158,7 @@ public void generateRevokeToken() throws TokenException { DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); + Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec); Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); final RollbackRunResult result = new RollbackRunResult(ImmutableList.of(), 0); @@ -174,4 +176,8 @@ public void generateRevokeToken() throws TokenException { // Validation should fail. 
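generateAccessToken above now folds the auto-generated key-aspect changes and the main proposal into a single batch, replacing the previous sequence of individual ingestProposal calls; it is the same AspectsBatchImpl pathway the entity-service tests migrated to. A sketch of the call, with the boolean flag mirrored from the hunk:

    Stream<MetadataChangeProposal> proposals = Stream.concat(
        Stream.of(proposal),
        AspectUtils.getAdditionalChanges(proposal, entityService).stream());
    entityService.ingestProposal(
        AspectsBatchImpl.builder()
            .mcps(proposals.collect(Collectors.toList()), entityService.getEntityRegistry())
            .build(),
        auditStamp, false);

With one batch, the token's aspects are submitted together rather than in two separate writes.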
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java
index 44673f693b555..f5ce938c411c6 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java
@@ -106,6 +106,7 @@ public void testAuthenticateSuccess() throws Exception {
     final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec();
     Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec);
     Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true);
+    Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry);

     final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
     authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME,
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java
index 75a9114529d7a..1c46e864a559e 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java
@@ -13,6 +13,7 @@
 import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
 import java.util.Date;
 import java.util.Map;
+
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

@@ -157,6 +158,7 @@ public void generateRevokeToken() throws TokenException {
         DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml"));
     final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec();
+    Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry);
     Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec);
     Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true);
     final RollbackRunResult result = new RollbackRunResult(ImmutableList.of(), 0);
@@ -174,4 +176,8 @@ public void generateRevokeToken() throws TokenException {
     // Validation should fail.
     assertThrows(TokenException.class, () -> tokenService.validateAccessToken(token));
   }
+
+  private void mockStateful() {
+
+  }
 }
diff --git a/metadata-service/auth-impl/src/test/resources/test-entity-registry.yaml b/metadata-service/auth-impl/src/test/resources/test-entity-registry.yaml
index acdc5ead92fb1..48c1b9450bd7d 100644
--- a/metadata-service/auth-impl/src/test/resources/test-entity-registry.yaml
+++ b/metadata-service/auth-impl/src/test/resources/test-entity-registry.yaml
@@ -4,4 +4,14 @@ entities:
     keyAspect: dataHubAccessTokenKey
     aspects:
       - dataHubAccessTokenInfo
+  - name: corpuser
+    keyAspect: corpUserKey
+    aspects:
+      - corpUserInfo
+      - corpUserEditableInfo
+      - corpUserStatus
+      - groupMembership
+      - status
+      - corpUserCredentials
+      - corpUserSettings
 events:
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java
index b7759d906f5b4..9feb7e469d018 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java
@@ -1,7 +1,7 @@
 package com.linkedin.gms.factory.entity;

 import com.linkedin.metadata.entity.ebean.EbeanAspectV2;
-import io.ebean.EbeanServer;
+import io.ebean.Database;
 import io.ebean.config.ServerConfig;
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
@@ -25,7 +25,7 @@ public class EbeanServerFactory {
   @DependsOn({"gmsEbeanServiceConfig"})
   @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
   @Nonnull
-  protected EbeanServer createServer() {
+  protected Database createServer() {
     ServerConfig serverConfig = applicationContext.getBean(ServerConfig.class);
     // Make sure that the serverConfig includes the package that contains DAO's Ebean model.
     if (!serverConfig.getPackages().contains(EBEAN_MODEL_PACKAGE)) {
@@ -33,7 +33,7 @@ protected EbeanServer createServer() {
     }
     // TODO: Consider supporting SCSI
     try {
-      return io.ebean.EbeanServerFactory.create(serverConfig);
+      return io.ebean.DatabaseFactory.create(serverConfig);
     } catch (NullPointerException ne) {
       log.error("Failed to connect to the server. Is it up?");
       throw ne;
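The EbeanServer-to-Database rename above tracks Ebean's own API migration: in Ebean 13, io.ebean.EbeanServer became io.ebean.Database and io.ebean.EbeanServerFactory became io.ebean.DatabaseFactory, while ServerConfig remained available as a deprecated subclass of DatabaseConfig (which is why the factory above can keep passing a ServerConfig). A minimal sketch of the newer naming, assuming standard Ebean 13 configuration:

// Sketch only: Ebean 13-style bootstrap with the renamed types.
import io.ebean.Database;
import io.ebean.DatabaseFactory;
import io.ebean.config.DatabaseConfig;

class EbeanBootstrapSketch {
  static Database create() {
    DatabaseConfig config = new DatabaseConfig(); // successor of ServerConfig
    config.loadFromProperties();                  // picks up application/ebean properties
    return DatabaseFactory.create(config);        // replaces EbeanServerFactory.create
  }
}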
Is it up?"); throw ne; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java index c4f0dae4c5fd5..925689c8609db 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java @@ -4,7 +4,7 @@ import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; -import io.ebean.EbeanServer; +import io.ebean.Database; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -19,7 +19,7 @@ public class EntityAspectDaoFactory { @DependsOn({"gmsEbeanServiceConfig"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - protected AspectDao createEbeanInstance(EbeanServer server) { + protected AspectDao createEbeanInstance(Database server) { return new EbeanAspectDao(server); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java index 0e83c1af9c66a..4000f7d6ed058 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java @@ -4,7 +4,7 @@ import com.linkedin.metadata.entity.AspectMigrationsDao; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; -import io.ebean.EbeanServer; +import io.ebean.Database; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -19,7 +19,7 @@ public class EntityAspectMigrationsDaoFactory { @DependsOn({"gmsEbeanServiceConfig"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - protected AspectMigrationsDao createEbeanInstance(EbeanServer server) { + protected AspectMigrationsDao createEbeanInstance(Database server) { return new EbeanAspectDao(server); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java index 737773d0972e2..b13bf5813d47e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java @@ -6,7 +6,7 @@ import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; -import io.ebean.EbeanServer; +import io.ebean.Database; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import 
org.springframework.beans.factory.annotation.Value; @@ -46,7 +46,7 @@ protected RetentionService createCassandraInstance(CqlSession session) { @DependsOn({"ebeanServer", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - protected RetentionService createEbeanInstance(EbeanServer server) { + protected RetentionService createEbeanInstance(Database server) { RetentionService retentionService = new EbeanRetentionService(_entityService, server, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java index 43b71d36e0e38..b26eb67465c0d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java @@ -13,10 +13,15 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.search.EntitySearchService; + import java.util.Collections; import java.util.HashSet; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -73,6 +78,7 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla ); // Loop over Data platforms and produce changelog + List> futures = new LinkedList<>(); for (Urn dpUrn : dataPlatformUrns) { EntityResponse dataPlatformEntityResponse = dataPlatformInfoResponses.get(dpUrn); if (dataPlatformEntityResponse == null) { @@ -86,7 +92,7 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla continue; } - _entityService.produceMetadataChangeLog( + futures.add(_entityService.alwaysProduceMCLAsync( dpUrn, Constants.DATA_PLATFORM_ENTITY_NAME, Constants.DATA_PLATFORM_INFO_ASPECT_NAME, @@ -96,9 +102,17 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla null, null, auditStamp, - ChangeType.RESTATE); + ChangeType.RESTATE).getFirst()); } + futures.stream().filter(Objects::nonNull).forEach(f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); + return listResult.getTotal(); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java index d5165713ab193..30608e984a0f2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java @@ -8,6 +8,8 @@ import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.AspectMigrationsDao; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import com.linkedin.metadata.models.AspectSpec; import 
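The change to IndexDataPlatformsStep above introduces a fire-then-await idiom that several of the following boot steps repeat: alwaysProduceMCLAsync returns a pair whose first element is the producer Future, the loop collects those futures, and a single pass at the end blocks on all of them so the Kafka sends overlap instead of serializing. The idiom in isolation (interfaces here are stand-ins, not DataHub types):

// Sketch only: collect async send futures, then barrier once per batch.
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

class FireThenAwaitSketch {
  interface Emitter { Future<?> emitAsync(String urn); } // stand-in for alwaysProduceMCLAsync

  static void restateAll(Emitter emitter, List<String> urns) {
    List<Future<?>> futures = new LinkedList<>();
    for (String urn : urns) {
      futures.add(emitter.emitAsync(urn)); // sends proceed in the background
    }
    // One barrier at the end: surfaces the first failure, keeps batch semantics.
    futures.stream().filter(Objects::nonNull).forEach(f -> {
      try {
        f.get();
      } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
      }
    });
  }
}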
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java
index d5165713ab193..30608e984a0f2 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java
@@ -8,6 +8,8 @@
 import com.linkedin.metadata.boot.BootstrapStep;
 import com.linkedin.metadata.entity.AspectMigrationsDao;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
+import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.utils.DataPlatformInstanceUtils;
 import com.linkedin.metadata.utils.EntityKeyUtils;
@@ -15,6 +17,8 @@
 import lombok.extern.slf4j.Slf4j;

 import javax.annotation.Nonnull;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Optional;

 import static com.linkedin.metadata.Constants.*;
@@ -60,19 +64,25 @@ public void execute() throws Exception {
       while (start < numEntities) {
         log.info("Reading urns {} to {} from the aspects table to generate dataplatform instance aspects", start,
             start + BATCH_SIZE);
-        Iterable<String> urns = _migrationsDao.listAllUrns(start, start + BATCH_SIZE);
-        for (String urnStr : urns) {
+
+        List<UpsertBatchItem> items = new LinkedList<>();
+
+        for (String urnStr : _migrationsDao.listAllUrns(start, start + BATCH_SIZE)) {
           Urn urn = Urn.createFromString(urnStr);
           Optional<DataPlatformInstance> dataPlatformInstance = getDataPlatformInstance(urn);
-          if (!dataPlatformInstance.isPresent()) {
-            continue;
+          if (dataPlatformInstance.isPresent()) {
+            items.add(UpsertBatchItem.builder()
+                    .urn(urn)
+                    .aspectName(DATA_PLATFORM_INSTANCE_ASPECT_NAME)
+                    .aspect(dataPlatformInstance.get())
+                    .build(_entityService.getEntityRegistry()));
           }
+        }

-          final AuditStamp aspectAuditStamp =
-              new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
+        final AuditStamp aspectAuditStamp =
+            new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
+        _entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), aspectAuditStamp, true, true);

-          _entityService.ingestAspect(urn, DATA_PLATFORM_INSTANCE_ASPECT_NAME, dataPlatformInstance.get(), aspectAuditStamp, null);
-        }
         log.info("Finished ingesting DataPlatformInstance for urn {} to {}", start, start + BATCH_SIZE);
         start += BATCH_SIZE;
       }
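Here the per-urn ingestAspect loop becomes one UpsertBatchItem list per page, committed through a single ingestAspects call. A condensed sketch of that accumulation, under the assumption that the two trailing booleans carry the same meaning as at the call site above (MCL emission and overwrite behavior; the helper itself is hypothetical):

// Sketch only: accumulate typed batch items, then ingest the page at once.
import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

class PageBatchSketch {
  static void ingestPage(EntityService entityService, Map<Urn, RecordTemplate> page,
      String aspectName, AuditStamp auditStamp) {
    List<UpsertBatchItem> items = new LinkedList<>();
    page.forEach((urn, aspect) -> items.add(UpsertBatchItem.builder()
        .urn(urn)
        .aspectName(aspectName)
        .aspect(aspect)
        .build(entityService.getEntityRegistry())));
    entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(),
        auditStamp, true, true); // flags as used in the boot step above
  }
}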
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java
index 11a45c4960b05..e4ad215eec864 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java
@@ -12,6 +12,14 @@
 import com.linkedin.metadata.entity.EntityService;
 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.util.List;
+import java.util.Spliterator;
+import java.util.Spliterators;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
+
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
+import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.ClassPathResource;
@@ -49,32 +57,32 @@ public void execute() throws IOException, URISyntaxException {
     }

     // 2. For each JSON object, cast into a DataPlatformSnapshot object.
-    for (final JsonNode dataPlatform : dataPlatforms) {
-      final String urnString;
-      final Urn urn;
-      try {
-        urnString = dataPlatform.get("urn").asText();
-        urn = Urn.createFromString(urnString);
-      } catch (URISyntaxException e) {
-        log.error("Malformed urn: {}", dataPlatform.get("urn").asText());
-        throw new RuntimeException("Malformed urn", e);
-      }
-
-      final DataPlatformInfo existingInfo =
-          (DataPlatformInfo) _entityService.getLatestAspect(urn, PLATFORM_ASPECT_NAME);
-      // Skip ingesting for this JSON object if info already exists.
-      if (existingInfo != null) {
-        log.debug(String.format("%s already exists for %s. Skipping...", PLATFORM_ASPECT_NAME, urnString));
-        continue;
-      }
-
-      final DataPlatformInfo info =
-          RecordUtils.toRecordTemplate(DataPlatformInfo.class, dataPlatform.get("aspect").toString());
-
-      final AuditStamp aspectAuditStamp =
-          new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
-
-      _entityService.ingestAspect(urn, PLATFORM_ASPECT_NAME, info, aspectAuditStamp, null);
-    }
+    List<UpsertBatchItem> dataPlatformAspects = StreamSupport.stream(
+        Spliterators.spliteratorUnknownSize(dataPlatforms.iterator(), Spliterator.ORDERED), false)
+        .map(dataPlatform -> {
+          final String urnString;
+          final Urn urn;
+          try {
+            urnString = dataPlatform.get("urn").asText();
+            urn = Urn.createFromString(urnString);
+          } catch (URISyntaxException e) {
+            log.error("Malformed urn: {}", dataPlatform.get("urn").asText());
+            throw new RuntimeException("Malformed urn", e);
+          }
+
+          final DataPlatformInfo info =
+              RecordUtils.toRecordTemplate(DataPlatformInfo.class, dataPlatform.get("aspect").toString());
+
+          return UpsertBatchItem.builder()
+                  .urn(urn)
+                  .aspectName(PLATFORM_ASPECT_NAME)
+                  .aspect(info)
+                  .build(_entityService.getEntityRegistry());
+        }).collect(Collectors.toList());
+
+    _entityService.ingestAspects(AspectsBatchImpl.builder().items(dataPlatformAspects).build(),
+        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()),
+        true,
+        false);
   }
 }
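Two things change above: the explicit for loop over a Jackson array becomes a Stream built from the node's iterator, and the per-platform "already exists" lookup disappears, with the final boolean passed to ingestAspects (false here, unlike the true used for data platform instances) apparently taking over the don't-overwrite semantics. The iterator-to-Stream bridge on its own, runnable with plain Jackson:

// Sketch only: streaming a Jackson array node via Spliterators.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

class JsonStreamSketch {
  static List<String> urns(JsonNode arrayNode) {
    // JsonNode.iterator() walks the array elements in order.
    return StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(arrayNode.iterator(), Spliterator.ORDERED), false)
        .map(node -> node.get("urn").asText())
        .collect(Collectors.toList());
  }

  public static void main(String[] args) throws Exception {
    JsonNode platforms = new ObjectMapper().readTree(
        "[{\"urn\":\"urn:li:dataPlatform:kafka\"},{\"urn\":\"urn:li:dataPlatform:mysql\"}]");
    System.out.println(urns(platforms)); // [urn:li:dataPlatform:kafka, urn:li:dataPlatform:mysql]
  }
}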
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
index 08a867d710419..55d612618ff9f 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
@@ -9,6 +9,7 @@
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.boot.UpgradeStep;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import com.linkedin.metadata.utils.GenericRecordUtils;
@@ -19,6 +20,8 @@
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.ClassPathResource;

+import java.util.List;
+
 import static com.linkedin.metadata.Constants.*;

@@ -96,8 +99,6 @@ private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipType
     keyAspectProposal.setChangeType(ChangeType.UPSERT);
     keyAspectProposal.setEntityUrn(ownershipTypeUrn);

-    _entityService.ingestProposal(keyAspectProposal, auditStamp, false);
-
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(ownershipTypeUrn);
     proposal.setEntityType(OWNERSHIP_TYPE_ENTITY_NAME);
@@ -107,7 +108,9 @@ private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipType
     proposal.setAspect(GenericRecordUtils.serializeAspect(info));
     proposal.setChangeType(ChangeType.UPSERT);

-    _entityService.ingestProposal(proposal, auditStamp, false);
+    _entityService.ingestProposal(AspectsBatchImpl.builder()
+        .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()).build(), auditStamp,
+        false);
   }

   @Nonnull
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java
index 1025cacb3685c..87dcfd736da40 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java
@@ -13,6 +13,7 @@
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.boot.BootstrapStep;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.query.ListUrnsResult;
@@ -28,8 +29,10 @@
 import java.net.URISyntaxException;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.ClassPathResource;
@@ -172,9 +175,6 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR
     keyAspectProposal.setChangeType(ChangeType.UPSERT);
     keyAspectProposal.setEntityUrn(urn);

-    _entityService.ingestProposal(keyAspectProposal,
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), false);
-
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(urn);
     proposal.setEntityType(POLICY_ENTITY_NAME);
@@ -182,8 +182,11 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR
     proposal.setAspect(GenericRecordUtils.serializeAspect(info));
     proposal.setChangeType(ChangeType.UPSERT);

-    _entityService.ingestProposal(proposal,
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), false);
+    _entityService.ingestProposal(AspectsBatchImpl.builder()
+            .mcps(List.of(keyAspectProposal, proposal), _entityRegistry)
+            .build(),
+        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()),
+        false);
   }

   private boolean hasPolicy(Urn policyUrn) {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
index b9f43cbf898a7..99be185113968 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
@@ -10,6 +10,7 @@
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.boot.BootstrapStep;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.utils.EntityKeyUtils;
@@ -18,6 +19,7 @@
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.policy.DataHubRoleInfo;
 import java.net.URISyntaxException;
+import java.util.List;
 import javax.annotation.Nonnull;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
@@ -99,9 +101,6 @@ private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo
     keyAspectProposal.setChangeType(ChangeType.UPSERT);
     keyAspectProposal.setEntityUrn(roleUrn);

-    _entityService.ingestProposal(keyAspectProposal,
-        new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), false);
-
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(roleUrn);
     proposal.setEntityType(DATAHUB_ROLE_ENTITY_NAME);
@@ -109,10 +108,12 @@ private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo
     proposal.setAspect(GenericRecordUtils.serializeAspect(dataHubRoleInfo));
     proposal.setChangeType(ChangeType.UPSERT);

-    _entityService.ingestProposal(proposal,
-        new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), false);
+    _entityService.ingestProposal(AspectsBatchImpl.builder()
+            .mcps(List.of(keyAspectProposal, proposal), _entityRegistry).build(),
+        new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()),
+        false);

-    _entityService.produceMetadataChangeLog(roleUrn, DATAHUB_ROLE_ENTITY_NAME, DATAHUB_ROLE_INFO_ASPECT_NAME,
+    _entityService.alwaysProduceMCLAsync(roleUrn, DATAHUB_ROLE_ENTITY_NAME, DATAHUB_ROLE_INFO_ASPECT_NAME,
         roleInfoAspectSpec, null, dataHubRoleInfo, null, null, auditStamp, ChangeType.RESTATE);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java
index b322afb809d2b..febcb9d4ec8a4 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java
@@ -16,7 +16,9 @@
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.util.List;
+
+import com.linkedin.util.Pair;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.ClassPathResource;
@@ -67,8 +69,11 @@ public void execute() throws IOException, URISyntaxException {
     final CorpUserKey key = (CorpUserKey) EntityKeyUtils.convertUrnToEntityKey(urn, getUserKeyAspectSpec());
     final AuditStamp aspectAuditStamp =
         new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
-    _entityService.ingestAspect(urn, CORP_USER_KEY_ASPECT_NAME, key, aspectAuditStamp, null);
-    _entityService.ingestAspect(urn, USER_INFO_ASPECT_NAME, info, aspectAuditStamp, null);
+
+    _entityService.ingestAspects(urn, List.of(
+        Pair.of(CORP_USER_KEY_ASPECT_NAME, key),
+        Pair.of(USER_INFO_ASPECT_NAME, info)
+    ), aspectAuditStamp, null);
   }

   private AspectSpec getUserKeyAspectSpec() {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java
index 6e1522051bfab..1f5f7f26ed89b 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java
@@ -16,7 +16,11 @@
 import lombok.extern.slf4j.Slf4j;

 import javax.annotation.Nonnull;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Objects;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;

 @Slf4j
 public class RestoreColumnLineageIndices extends UpgradeStep {
@@ -89,6 +93,7 @@ private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp
       return latestAspects.getTotalCount();
     }

+    List<Future<?>> futures = new LinkedList<>();
     for (int i = 0; i < latestAspects.getValues().size(); i++) {
       ExtraInfo info = latestAspects.getMetadata().getExtraInfos().get(i);
       RecordTemplate upstreamLineageRecord = latestAspects.getValues().get(i);
@@ -99,7 +104,7 @@ private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp
         continue;
       }

-      _entityService.produceMetadataChangeLog(
+      futures.add(_entityService.alwaysProduceMCLAsync(
           urn,
           Constants.DATASET_ENTITY_NAME,
           Constants.UPSTREAM_LINEAGE_ASPECT_NAME,
@@ -109,9 +114,17 @@ private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp
           null,
           null,
           auditStamp,
-          ChangeType.RESTATE);
+          ChangeType.RESTATE).getFirst());
     }

+    futures.stream().filter(Objects::nonNull).forEach(f -> {
+      try {
+        f.get();
+      } catch (InterruptedException | ExecutionException e) {
+        throw new RuntimeException(e);
+      }
+    });
+
     return latestAspects.getTotalCount();
   }

@@ -140,6 +153,7 @@ private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditS
       return latestAspects.getTotalCount();
     }

+    List<Future<?>> futures = new LinkedList<>();
     for (int i = 0; i < latestAspects.getValues().size(); i++) {
       ExtraInfo info = latestAspects.getMetadata().getExtraInfos().get(i);
       RecordTemplate inputFieldsRecord = latestAspects.getValues().get(i);
@@ -150,7 +164,7 @@ private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditS
         continue;
       }

-      _entityService.produceMetadataChangeLog(
+      futures.add(_entityService.alwaysProduceMCLAsync(
           urn,
           entityName,
           Constants.INPUT_FIELDS_ASPECT_NAME,
@@ -160,9 +174,17 @@ private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditS
           null,
           null,
           auditStamp,
-          ChangeType.RESTATE);
+          ChangeType.RESTATE).getFirst());
     }

+    futures.stream().filter(Objects::nonNull).forEach(f -> {
+      try {
+        f.get();
+      } catch (InterruptedException | ExecutionException e) {
+        throw new RuntimeException(e);
+      }
+    });
+
     return latestAspects.getTotalCount();
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java
index 989ee1a39b169..355936fe1994c 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java
@@ -23,8 +23,12 @@
 import java.net.URISyntaxException;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
 import javax.annotation.Nonnull;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
@@ -126,6 +130,7 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam
     }

     // Loop over datasets and produce changelog
+    List<Future<?>> futures = new LinkedList<>();
     for (Urn datasetUrn : datasetUrns) {
       EntityResponse response = upstreamLineageResponse.get(datasetUrn);
       if (response == null) {
@@ -137,7 +142,7 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam
         continue;
       }

-      _entityService.produceMetadataChangeLog(
+      futures.add(_entityService.alwaysProduceMCLAsync(
           datasetUrn,
           DATASET_ENTITY_NAME,
           UPSTREAM_LINEAGE_ASPECT_NAME,
@@ -147,8 +152,16 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam
           null,
           null,
           auditStamp,
-          ChangeType.RESTATE);
+          ChangeType.RESTATE).getFirst());
     }
+
+    futures.stream().filter(Objects::nonNull).forEach(f -> {
+      try {
+        f.get();
+      } catch (InterruptedException | ExecutionException e) {
+        throw new RuntimeException(e);
+      }
+    });
   }

   private UpstreamLineage getUpstreamLineage(EntityResponse entityResponse) {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java
index 097dcfdfdf52e..4de2bea9a76a9 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java
@@ -16,10 +16,15 @@
 import com.linkedin.metadata.search.EntitySearchService;
 import com.linkedin.metadata.search.SearchEntity;
 import com.linkedin.metadata.search.SearchResult;
+
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
@@ -87,6 +92,7 @@ null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
     );

     // Loop over Terms and produce changelog
+    List<Future<?>> futures = new LinkedList<>();
     for (Urn termUrn : termUrns) {
       EntityResponse termEntityResponse = termInfoResponses.get(termUrn);
       if (termEntityResponse == null) {
@@ -99,7 +105,7 @@ null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
         continue;
       }

-      _entityService.produceMetadataChangeLog(
+      futures.add(_entityService.alwaysProduceMCLAsync(
           termUrn,
           Constants.GLOSSARY_TERM_ENTITY_NAME,
           Constants.GLOSSARY_TERM_INFO_ASPECT_NAME,
@@ -109,9 +115,17 @@ null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
           null,
           null,
           auditStamp,
-          ChangeType.RESTATE);
+          ChangeType.RESTATE).getFirst());
     }

+    futures.stream().filter(Objects::nonNull).forEach(f -> {
+      try {
+        f.get();
+      } catch (InterruptedException | ExecutionException e) {
+        throw new RuntimeException(e);
+      }
+    });
+
     return termsResult.getNumEntities();
   }

@@ -130,6 +144,7 @@ null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
     );

     // Loop over Nodes and produce changelog
+    List<Future<?>> futures = new LinkedList<>();
     for (Urn nodeUrn : nodeUrns) {
       EntityResponse nodeEntityResponse = nodeInfoResponses.get(nodeUrn);
       if (nodeEntityResponse == null) {
@@ -142,7 +157,7 @@ null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
         continue;
       }

-      _entityService.produceMetadataChangeLog(
+      futures.add(_entityService.alwaysProduceMCLAsync(
          nodeUrn,
          Constants.GLOSSARY_NODE_ENTITY_NAME,
          Constants.GLOSSARY_NODE_INFO_ASPECT_NAME,
@@ -152,9 +167,17 @@ null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
          null,
          null,
          auditStamp,
-          ChangeType.RESTATE);
+          ChangeType.RESTATE).getFirst());
     }

+    futures.stream().filter(Objects::nonNull).forEach(f -> {
+      try {
+        f.get();
+      } catch (InterruptedException | ExecutionException e) {
+        throw new RuntimeException(e);
+      }
+    });
+
     return nodesResult.getNumEntities();
   }
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
index 82f3a82c135ce..0ae8eb2cba808 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
@@ -5,6 +5,7 @@
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.metadata.entity.AspectMigrationsDao;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
@@ -95,15 +96,24 @@ public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throw
     final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao);
     step.execute();

-    verify(entityService, times(countOfChartEntities))
-        .ingestAspect(
-            argThat(arg -> arg.getEntityType().equals("chart")),
-            eq(DATA_PLATFORM_INSTANCE_ASPECT_NAME),
-            any(DataPlatformInstance.class),
+    verify(entityService, times(1))
+        .ingestAspects(
+            argThat(arg ->
+                arg.getItems().stream()
+                    .allMatch(item -> item.getUrn().getEntityType().equals("chart")
+                        && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME)
+                        && ((UpsertBatchItem) item).getAspect() instanceof DataPlatformInstance)
+            ),
             any(),
-            any());
+            anyBoolean(),
+            anyBoolean());
     verify(entityService, times(0))
-        .ingestAspect(argThat(arg -> !arg.getEntityType().equals("chart")), anyString(), any(), any(), any());
+        .ingestAspects(argThat(arg ->
+            !arg.getItems().stream()
+                .allMatch(item -> item.getUrn().getEntityType().equals("chart")
+                    && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME)
+                    && ((UpsertBatchItem) item).getAspect() instanceof DataPlatformInstance)
+        ), any(), anyBoolean(), anyBoolean());
   }

   @NotNull
@@ -135,6 +145,7 @@ private void mockDBWithWorkToDo(
     when(migrationsDao.checkIfAspectExists(DATA_PLATFORM_INSTANCE_ASPECT_NAME)).thenReturn(false);
     when(migrationsDao.countEntities()).thenReturn((long) allUrnsInDB.size());
     when(migrationsDao.listAllUrns(anyInt(), anyInt())).thenReturn(allUrnsInDB);
+    when(entityService.getEntityRegistry()).thenReturn(entityRegistry);
   }

   private List<Urn> insertMockEntities(int count, String entity, String urnTemplate, EntityRegistry entityRegistry, EntityService entityService) {
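Because the step now makes one ingestAspects call per page rather than one ingestAspect call per entity, the test above switches from counting invocations to inspecting the single batch argument with argThat. Reduced to toy types, the verification style looks like this (the interfaces are stand-ins, not DataHub classes):

// Sketch only: verify one batched call whose items all satisfy a predicate.
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import java.util.List;

class BatchVerifySketch {
  interface Batch { List<String> getItems(); }            // stand-in for AspectsBatch
  interface Service { void ingestAspects(Batch batch); }  // stand-in for EntityService

  public static void main(String[] args) {
    Service service = mock(Service.class);
    Batch batch = () -> List.of("urn:li:chart:1", "urn:li:chart:2");
    service.ingestAspects(batch);
    // The matcher checks every item of the single captured batch.
    verify(service, times(1)).ingestAspects(
        argThat(b -> b.getItems().stream().allMatch(i -> i.startsWith("urn:li:chart:"))));
  }
}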
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java
index b73e749142863..aca5e322567d8 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java
@@ -21,6 +21,7 @@
 import com.linkedin.metadata.query.ExtraInfoArray;
 import com.linkedin.metadata.query.ListResultMetadata;
 import com.linkedin.mxe.MetadataChangeProposal;
+import com.linkedin.util.Pair;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

@@ -29,6 +30,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.Future;

 public class RestoreColumnLineageIndicesTest {

@@ -64,7 +66,7 @@ public void testExecuteFirstTime() throws Exception {
         Mockito.any(AuditStamp.class),
         Mockito.eq(false)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(datasetUrn),
         Mockito.eq(Constants.DATASET_ENTITY_NAME),
         Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME),
@@ -76,7 +78,7 @@ public void testExecuteFirstTime() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(chartUrn),
         Mockito.eq(Constants.CHART_ENTITY_NAME),
         Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME),
@@ -88,7 +90,7 @@ public void testExecuteFirstTime() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(dashboardUrn),
         Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
         Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME),
@@ -126,7 +128,7 @@ public void testExecuteWithNewVersion() throws Exception {
         Mockito.any(AuditStamp.class),
         Mockito.eq(false)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(datasetUrn),
         Mockito.eq(Constants.DATASET_ENTITY_NAME),
         Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME),
@@ -138,7 +140,7 @@ public void testExecuteWithNewVersion() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(chartUrn),
         Mockito.eq(Constants.CHART_ENTITY_NAME),
         Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME),
@@ -150,7 +152,7 @@ public void testExecuteWithNewVersion() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(dashboardUrn),
         Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
         Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME),
@@ -188,7 +190,7 @@ public void testDoesNotExecuteWithSameVersion() throws Exception {
         Mockito.any(AuditStamp.class),
         Mockito.eq(false)
     );
-    Mockito.verify(mockService, Mockito.times(0)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync(
         Mockito.eq(datasetUrn),
         Mockito.eq(Constants.DATASET_ENTITY_NAME),
         Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME),
@@ -200,7 +202,7 @@ public void testDoesNotExecuteWithSameVersion() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(0)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync(
         Mockito.eq(chartUrn),
         Mockito.eq(Constants.CHART_ENTITY_NAME),
         Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME),
@@ -212,7 +214,7 @@ public void testDoesNotExecuteWithSameVersion() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(0)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync(
         Mockito.eq(dashboardUrn),
         Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
         Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME),
@@ -234,6 +236,12 @@ private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityServ
             .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L))
     );

+    Mockito.when(mockService.alwaysProduceMCLAsync(
+        Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class),
+        Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(),
+        Mockito.any(ChangeType.class)
+    )).thenReturn(Pair.of(Mockito.mock(Future.class), false));
+
     Mockito.when(mockService.listLatestAspects(
         Mockito.eq(Constants.DATASET_ENTITY_NAME),
         Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME),
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java
index d56fbed07f890..3753904053256 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java
@@ -21,6 +21,7 @@
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.mxe.MetadataChangeProposal;
+import com.linkedin.util.Pair;
 import java.util.List;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
@@ -29,6 +30,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.concurrent.Future;

 public class RestoreGlossaryIndicesTest {

@@ -94,6 +96,11 @@ public void testExecuteFirstTime() throws Exception {
         upgradeEntityUrn,
         Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)
     )).thenReturn(null);
+    Mockito.when(mockService.alwaysProduceMCLAsync(
+        Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class),
+        Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(),
+        Mockito.any(ChangeType.class)
+    )).thenReturn(Pair.of(Mockito.mock(Future.class), false));

     mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService);
     mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService);
@@ -111,7 +118,7 @@ public void testExecuteFirstTime() throws Exception {
         Mockito.any(AuditStamp.class),
         Mockito.eq(false)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(glossaryTermUrn),
         Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME),
         Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME),
@@ -123,7 +130,7 @@ public void testExecuteFirstTime() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(glossaryNodeUrn),
         Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME),
         Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME),
@@ -155,6 +162,11 @@ public void testExecutesWithNewVersion() throws Exception {
         upgradeEntityUrn,
         Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)
     )).thenReturn(response);
+    Mockito.when(mockService.alwaysProduceMCLAsync(
+        Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class),
+        Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(),
+        Mockito.any(ChangeType.class)
+    )).thenReturn(Pair.of(Mockito.mock(Future.class), false));

     mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService);
     mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService);
@@ -164,7 +176,6 @@ public void testExecutesWithNewVersion() throws Exception {
     RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry);
     restoreIndicesStep.execute();

-    Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME);
     Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME);
     Mockito.verify(mockService, Mockito.times(2)).ingestProposal(
@@ -172,7 +183,7 @@ public void testExecutesWithNewVersion() throws Exception {
         Mockito.any(AuditStamp.class),
         Mockito.eq(false)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(glossaryTermUrn),
         Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME),
         Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME),
@@ -184,7 +195,7 @@ public void testExecutesWithNewVersion() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(1)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync(
         Mockito.eq(glossaryNodeUrn),
         Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME),
         Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME),
@@ -233,7 +244,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception {
         Mockito.any(AuditStamp.class),
         Mockito.anyBoolean()
     );
-    Mockito.verify(mockService, Mockito.times(0)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync(
         Mockito.eq(glossaryTermUrn),
         Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME),
         Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME),
@@ -245,7 +256,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception {
         Mockito.any(),
         Mockito.eq(ChangeType.RESTATE)
     );
-    Mockito.verify(mockService, Mockito.times(0)).produceMetadataChangeLog(
+    Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync(
         Mockito.eq(glossaryNodeUrn),
         Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME),
         Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME),
diff --git a/metadata-service/factories/src/test/resources/test-entity-registry.yaml b/metadata-service/factories/src/test/resources/test-entity-registry.yaml
index 45aa9b9554fb4..fe32b413751e6 100644
--- a/metadata-service/factories/src/test/resources/test-entity-registry.yaml
+++ b/metadata-service/factories/src/test/resources/test-entity-registry.yaml
@@ -8,3 +8,9 @@ entities:
     keyAspect: chartKey
     aspects:
       - domains
+      - dataPlatformInstance
+  - name: dataPlatform
+    category: core
+    keyAspect: dataPlatformKey
+    aspects:
+      - dataPlatformInfo
\ No newline at end of file
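A consequence of the void-to-Pair signature change shows up in all of these tests: a Mockito mock returns null for unstubbed methods, so any step that calls .getFirst() on the result would hit a NullPointerException unless the mock is stubbed to return a pair. The minimal stub, with a stand-in signature:

// Sketch only: default stub so code calling getFirst()/get() on the result survives.
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.linkedin.util.Pair;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;

class AsyncStubSketch {
  interface Producer { Pair<Future<?>, Boolean> emitAsync(String urn); } // stand-in

  static Producer stubbed() {
    Producer producer = mock(Producer.class);
    // A completed future keeps the await loop in the boot steps from blocking.
    when(producer.emitAsync(anyString()))
        .thenReturn(Pair.of(CompletableFuture.completedFuture(null), false));
    return producer;
  }
}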
diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java
index 52842d2f32b2a..4d0e5e7df29d5 100644
--- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java
+++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java
@@ -19,8 +19,10 @@
 import com.linkedin.entity.Aspect;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.entity.EntityService;
-import com.linkedin.metadata.entity.IngestProposalResult;
+import com.linkedin.metadata.entity.IngestResult;
 import com.linkedin.metadata.entity.RollbackRunResult;
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
+import com.linkedin.metadata.entity.transactions.AspectsBatch;
 import com.linkedin.metadata.entity.validation.ValidationException;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.entity.AspectUtils;
@@ -49,6 +51,7 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 import org.reflections.Reflections;
@@ -266,10 +269,17 @@ public static Pair<String, Boolean> ingestProposal(com.linkedin.mxe.MetadataChan
     log.info("Proposal: {}", serviceProposal);
     Throwable exceptionally = null;
     try {
-      IngestProposalResult proposalResult = entityService.ingestProposal(serviceProposal, auditStamp, false);
-      Urn urn = proposalResult.getUrn();
-      additionalChanges.forEach(proposal -> entityService.ingestProposal(proposal, auditStamp, false));
-      return new Pair<>(urn.toString(), proposalResult.isDidUpdate());
+      Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(serviceProposal),
+          AspectUtils.getAdditionalChanges(serviceProposal, entityService).stream());
+
+      AspectsBatch batch = AspectsBatchImpl.builder().mcps(proposalStream.collect(Collectors.toList()),
+          entityService.getEntityRegistry()).build();
+
+      Set<IngestResult> proposalResult =
+          entityService.ingestProposal(batch, auditStamp, false);
+
+      Urn urn = proposalResult.stream().findFirst().get().getUrn();
+      return new Pair<>(urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted));
     } catch (ValidationException ve) {
       exceptionally = ve;
       throw HttpClientErrorException.create(HttpStatus.UNPROCESSABLE_ENTITY, ve.getMessage(), null, null, null);
diff --git a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java
index 80cc80067fba2..229e71168557d 100644
--- a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java
+++ b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java
@@ -9,6 +9,7 @@
 import com.linkedin.metadata.config.PreProcessHooks;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.linkedin.metadata.entity.AspectDao;
+import com.linkedin.metadata.entity.UpdateAspectResult;
 import com.linkedin.metadata.event.EventProducer;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.service.UpdateIndicesService;
@@ -35,8 +36,12 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.function.Function;
+
+import io.ebean.Transaction;
 import mock.MockEntityRegistry;
 import mock.MockEntityService;
+import org.mockito.ArgumentMatchers;
 import org.mockito.Mockito;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
@@ -60,6 +65,11 @@ public void setup()
       throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException {
     EntityRegistry mockEntityRegistry = new MockEntityRegistry();
     AspectDao aspectDao = Mockito.mock(AspectDao.class);
+    Mockito.when(aspectDao.runInTransactionWithRetry(
+        ArgumentMatchers.<Function<Transaction, List<UpdateAspectResult>>>any(), any(), anyInt())).thenAnswer(i ->
+            ((Function<Transaction, List<UpdateAspectResult>>) i.getArgument(0)).apply(Mockito.mock(Transaction.class))
+    );
+
     EventProducer mockEntityEventProducer = Mockito.mock(EventProducer.class);
     UpdateIndicesService mockUpdateIndicesService = mock(UpdateIndicesService.class);
     PreProcessHooks preProcessHooks = new PreProcessHooks();
diff --git a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java
index cf7985a7c26a9..852b6cfcb4b22 100644
--- a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java
+++ b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java
@@ -24,9 +24,7 @@
 import com.linkedin.metadata.entity.EntityServiceImpl;
 import com.linkedin.metadata.entity.ListResult;
 import com.linkedin.metadata.entity.RollbackRunResult;
-import com.linkedin.metadata.entity.UpdateAspectResult;
 import com.linkedin.metadata.event.EventProducer;
-import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.query.ListUrnsResult;
 import com.linkedin.metadata.run.AspectRowSummary;
@@ -40,16 +38,13 @@
 import com.linkedin.schema.SchemaFieldDataType;
 import com.linkedin.schema.SchemaMetadata;
 import com.linkedin.schema.StringType;
-import com.linkedin.util.Pair;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 import java.util.Set;
-import java.util.function.Function;
 import javax.annotation.Nonnull;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -136,12 +131,6 @@ public EnvelopedAspect getLatestEnvelopedAspect(@Nonnull String entityName, @Non
     return null;
   }

-  @Override
-  public EnvelopedAspect getEnvelopedAspect(@Nonnull String entityName, @Nonnull Urn urn, @Nonnull String aspectName,
-      long version) throws Exception {
-    return null;
-  }
-
   @Override
   public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version) {
     return null;
@@ -153,7 +142,7 @@ public ListResult<RecordTemplate> listLatestAspects(@Nonnull String entityName,
     return null;
   }

-  @Nonnull
+/*  @Nonnull
   @Override
   protected UpdateAspectResult ingestAspectToLocalDB(@Nonnull Urn urn, @Nonnull String aspectName,
       @Nonnull Function<Optional<RecordTemplate>, RecordTemplate> updateLambda, @Nonnull AuditStamp auditStamp,
@@ -168,7 +157,7 @@ protected List<Pair<String, UpdateAspectResult>> ingestAspectsToLocalDB(@Nonnull
       @Nonnull List<Pair<String, RecordTemplate>> aspectRecordsToIngest, @Nonnull AuditStamp auditStamp,
       @Nonnull SystemMetadata providedSystemMetadata) {
     return Collections.emptyList();
-  }
+  }*/

   @Nullable
   @Override
@@ -177,13 +166,6 @@ public RecordTemplate ingestAspectIfNotPresent(@NotNull Urn urn, @NotNull String
     return null;
   }

-  @Override
-  public RecordTemplate updateAspect(@Nonnull Urn urn, @Nonnull String entityName, @Nonnull String aspectName,
-      @Nonnull AspectSpec aspectSpec, @Nonnull RecordTemplate newValue, @Nonnull AuditStamp auditStamp,
-      @Nonnull long version, @Nonnull boolean emitMae) {
-    return null;
-  }
-
   @Override
   public ListUrnsResult listUrns(@Nonnull String entityName, int start, int count) {
     return null;
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java
index 5c06dbfc8b25a..936c8bb67e645 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java
@@ -8,6 +8,9 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.aspect.GetTimeseriesAspectValuesResponse;
+import com.linkedin.metadata.entity.IngestResult;
+import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
+import com.linkedin.metadata.entity.transactions.AspectsBatch;
 import com.linkedin.metadata.resources.operations.Utils;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
@@ -16,7 +19,6 @@
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.entity.AspectUtils;
 import com.linkedin.metadata.entity.EntityService;
-import com.linkedin.metadata.entity.IngestProposalResult;
 import com.linkedin.metadata.entity.validation.ValidationException;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.query.filter.Filter;
@@ -41,6 +43,10 @@
 import io.opentelemetry.extension.annotations.WithSpan;
 import java.net.URISyntaxException;
 import java.time.Clock;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import javax.inject.Inject;
@@ -179,7 +185,7 @@ public Task<String> ingestProposal(
       @ActionParam(PARAM_ASYNC) @Optional(UNSET) String async) throws URISyntaxException {
     log.info("INGEST PROPOSAL proposal: {}", metadataChangeProposal);

-    boolean asyncBool;
+    final boolean asyncBool;
     if (UNSET.equals(async)) {
       asyncBool = Boolean.parseBoolean(System.getenv(ASYNC_INGEST_DEFAULT_NAME));
     } else {
@@ -200,18 +206,34 @@ public Task<String> ingestProposal(
     return RestliUtil.toTask(() -> {
       log.debug("Proposal: {}", metadataChangeProposal);
       try {
-        IngestProposalResult result = _entityService.ingestProposal(metadataChangeProposal, auditStamp, asyncBool);
-        Urn responseUrn = result.getUrn();
+        final AspectsBatch batch;
+        if (asyncBool) {
+          // if async we'll expand the getAdditionalChanges later, no need to do this early
+          batch = AspectsBatchImpl.builder()
+              .mcps(List.of(metadataChangeProposal), _entityService.getEntityRegistry())
+              .build();
+        } else {
+          Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(metadataChangeProposal),
+              AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService).stream());

-        if (!asyncBool) {
-          AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService)
-              .forEach(proposal -> _entityService.ingestProposal(proposal, auditStamp, asyncBool));
+          batch = AspectsBatchImpl.builder()
+              .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry())
+              .build();
         }

-        if (!result.isQueued()) {
-          tryIndexRunId(responseUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService);
+        Set<IngestResult> results =
+            _entityService.ingestProposal(batch, auditStamp, asyncBool);
+
+        IngestResult one = results.stream()
+            .findFirst()
+            .get();
+
+        // Update runIds, only works for existing documents, so ES document must exist
+        Urn resultUrn = one.getUrn();
+        if (one.isProcessedMCL() || one.isUpdate()) {
+          tryIndexRunId(resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService);
         }
-        return responseUrn.toString();
+        return resultUrn.toString();
       } catch (ValidationException e) {
         throw new RestLiServiceException(HttpStatus.S_422_UNPROCESSABLE_ENTITY, e.getMessage());
       }
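The endpoint above now forks on asyncBool before building the batch: the synchronous path expands default aspects eagerly, while the asynchronous path ships only the original proposal and leaves expansion to the consumer. The control flow stripped of DataHub types (names here are illustrative):

// Sketch only: eager vs. deferred default-aspect expansion.
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class IngestDecisionSketch {
  interface Expander { List<String> additionalChanges(String proposal); } // stand-in

  static List<String> buildBatch(String proposal, boolean async, Expander expander) {
    if (async) {
      // Deferred: the async consumer will expand defaults later.
      return Collections.singletonList(proposal);
    }
    // Eager: expand now so the single synchronous write carries everything.
    List<String> batch = new ArrayList<>();
    batch.add(proposal);
    batch.addAll(expander.additionalChanges(proposal));
    return batch;
  }
}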
_aspectResource.ingestProposal(mcp, "false");
     verify(_producer, times(5)).produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class));
     verifyNoMoreInteractions(_producer);
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java
index e062d55254f90..40a5e3a07ae6d 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java
@@ -1,6 +1,7 @@
 package com.linkedin.metadata.entity;
 
 import com.datahub.authentication.Authentication;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
@@ -31,22 +32,52 @@ public class AspectUtils {
   private AspectUtils() {
   }
 
+  public static final Set<ChangeType> SUPPORTED_TYPES = Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH);
+
   public static List<MetadataChangeProposal> getAdditionalChanges(
-      @Nonnull MetadataChangeProposal metadataChangeProposal,
-      @Nonnull EntityService entityService) {
-    // No additional changes for delete operation
-    if (metadataChangeProposal.getChangeType() == ChangeType.DELETE) {
+      @Nonnull MetadataChangeProposal metadataChangeProposal,
+      @Nonnull EntityService entityService,
+      boolean onPrimaryKeyInsertOnly) {
+
+    // No additional changes for unsupported operations
+    if (!SUPPORTED_TYPES.contains(metadataChangeProposal.getChangeType())) {
       return Collections.emptyList();
     }
 
     final Urn urn = EntityKeyUtils.getUrnFromProposal(metadataChangeProposal,
-        entityService.getKeyAspectSpec(metadataChangeProposal.getEntityType()));
+        entityService.getKeyAspectSpec(metadataChangeProposal.getEntityType()));
+
+    final Map<String, RecordTemplate> includedAspects;
+    if (metadataChangeProposal.getChangeType() != ChangeType.PATCH) {
+      RecordTemplate aspectRecord = GenericRecordUtils.deserializeAspect(metadataChangeProposal.getAspect().getValue(),
+          metadataChangeProposal.getAspect().getContentType(), entityService.getEntityRegistry()
+              .getEntitySpec(urn.getEntityType()).getAspectSpec(metadataChangeProposal.getAspectName()));
+      includedAspects = ImmutableMap.of(metadataChangeProposal.getAspectName(), aspectRecord);
+    } else {
+      includedAspects = ImmutableMap.of();
+    }
+
+    if (onPrimaryKeyInsertOnly) {
+      return entityService.generateDefaultAspectsOnFirstWrite(urn, includedAspects)
+          .getValue()
+          .stream()
+          .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal))
+          .filter(Objects::nonNull)
+          .collect(Collectors.toList());
+    } else {
+      return entityService.generateDefaultAspectsIfMissing(urn, includedAspects)
+          .stream()
+          .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal))
+          .filter(Objects::nonNull)
+          .collect(Collectors.toList());
+    }
+  }
+
+  public static List<MetadataChangeProposal> getAdditionalChanges(
+      @Nonnull MetadataChangeProposal metadataChangeProposal,
+      @Nonnull EntityService entityService) {
 
-    return entityService.generateDefaultAspectsIfMissing(urn, ImmutableSet.of(metadataChangeProposal.getAspectName()))
-        .stream()
-        .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal))
-        .filter(Objects::nonNull)
-        .collect(Collectors.toList());
+    return getAdditionalChanges(metadataChangeProposal, entityService, false);
   }
 
   public static Map batchGetLatestAspect(
diff --git
a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java index 35b7bc4589b32..40284efe7ac82 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java @@ -270,9 +270,9 @@ private void updateAspect(Urn urn, String aspectName, RecordTemplate prevAspect, proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect)); final AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - final IngestProposalResult ingestProposalResult = _entityService.ingestProposal(proposal, auditStamp, false); + final IngestResult ingestProposalResult = _entityService.ingestProposal(proposal, auditStamp, false); - if (!ingestProposalResult.isDidUpdate()) { + if (!ingestProposalResult.isSqlCommitted()) { log.error("Failed to ingest aspect with references removed. Before {}, after: {}, please check MCP processor" + " logs for more information", prevAspect, newAspect); handleError(new DeleteEntityServiceError("Failed to ingest new aspect", diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index 25edff740037e..86043f4b7cd27 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -13,12 +13,11 @@ import com.linkedin.metadata.aspect.VersionedAspect; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; +import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.run.AspectRowSummary; -import com.linkedin.metadata.snapshot.Snapshot; -import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; @@ -28,6 +27,7 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.Future; import java.util.function.Consumer; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -140,22 +140,6 @@ EnvelopedAspect getLatestEnvelopedAspect( @Nonnull final Urn urn, @Nonnull final String aspectName) throws Exception; - /** - * Retrieves the specific version of the aspect for the given urn - * - * @param entityName name of the entity to fetch - * @param urn urn to fetch - * @param aspectName name of the aspect to fetch - * @param version version to fetch - * @return {@link EnvelopedAspect} object, or null if one cannot be found - */ - EnvelopedAspect getEnvelopedAspect( - // TODO: entityName is only used for a debug statement, can we remove this as a param? 
-      String entityName,
-      @Nonnull Urn urn,
-      @Nonnull String aspectName,
-      long version) throws Exception;
-
   @Deprecated
   VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version);
 
@@ -165,24 +149,11 @@ ListResult<RecordTemplate> listLatestAspects(
       final int start,
       final int count);
 
-  void ingestAspects(@Nonnull final Urn urn, @Nonnull List<Pair<String, RecordTemplate>> aspectRecordsToIngest,
+  List<UpdateAspectResult> ingestAspects(@Nonnull final Urn urn, @Nonnull List<Pair<String, RecordTemplate>> aspectRecordsToIngest,
       @Nonnull final AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata);
 
-  /**
-   * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}.
-   *
-   * Note that in general, this should not be used externally. It is currently serving upgrade scripts and
-   * is as such public.
-   *
-   * @param urn an urn associated with the new aspect
-   * @param aspectName name of the aspect being inserted
-   * @param newValue value of the aspect being inserted
-   * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time
-   * @param systemMetadata
-   * @return the {@link RecordTemplate} representation of the written aspect object
-   */
-  RecordTemplate ingestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName,
-      @Nonnull final RecordTemplate newValue, @Nonnull final AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata);
+  List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspectsBatch, @Nonnull final AuditStamp auditStamp,
+      boolean emitMCL, boolean overwrite);
 
   /**
    * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}.
@@ -211,17 +182,6 @@ String batchApplyRetention(Integer start, Integer count, Integer attemptWithVers
   // TODO: Extract this to a different service, doesn't need to be here
   RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger);
 
-  @Deprecated
-  RecordTemplate updateAspect(
-      @Nonnull final Urn urn,
-      @Nonnull final String entityName,
-      @Nonnull final String aspectName,
-      @Nonnull final AspectSpec aspectSpec,
-      @Nonnull final RecordTemplate newValue,
-      @Nonnull final AuditStamp auditStamp,
-      @Nonnull final long version,
-      @Nonnull final boolean emitMae);
-
   ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count);
 
   @Deprecated
@@ -230,23 +190,14 @@ RecordTemplate updateAspect(
   @Deprecated
   Map<Urn, Entity> getEntities(@Nonnull final Set<Urn> urns, @Nonnull Set<String> aspectNames);
 
-  @Deprecated
-  void produceMetadataAuditEvent(@Nonnull final Urn urn, @Nonnull final String aspectName,
-      @Nullable final RecordTemplate oldAspectValue, @Nullable final RecordTemplate newAspectValue,
-      @Nullable final SystemMetadata oldSystemMetadata, @Nullable final SystemMetadata newSystemMetadata,
-      @Nullable final MetadataAuditOperation operation);
-
-  @Deprecated
-  void produceMetadataAuditEventForKey(@Nonnull final Urn urn,
-      @Nullable final SystemMetadata newSystemMetadata);
-
-  void produceMetadataChangeLog(@Nonnull final Urn urn, AspectSpec aspectSpec,
+  Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, AspectSpec aspectSpec,
       @Nonnull final MetadataChangeLog metadataChangeLog);
 
-  void produceMetadataChangeLog(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName,
-      @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue,
-      @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata,
-      @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, @Nonnull final ChangeType changeType);
+  Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName,
+      @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue,
+      @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata,
+      @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp,
+      @Nonnull final ChangeType changeType);
 
   RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName);
 
@@ -261,9 +212,6 @@ void ingestEntities(@Nonnull final List<Entity> entities, @Nonnull final AuditSt
   void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp,
       @Nonnull SystemMetadata systemMetadata);
 
-  @Deprecated
-  Snapshot buildSnapshot(@Nonnull final Urn urn, @Nonnull final RecordTemplate aspectValue);
-
   void setRetentionService(RetentionService retentionService);
 
   AspectSpec getKeyAspectSpec(@Nonnull final Urn urn);
 
@@ -272,8 +220,33 @@ void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp,
 
   String getKeyAspectName(@Nonnull final Urn urn);
 
+  /**
+   * Generate default aspects if not present in the database.
+   * @param urn entity urn
+   * @param includedAspects aspects being written
+   * @return additional aspects to be written
+   */
   List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnull final Urn urn,
-      Set<String> includedAspects);
+      Map<String, RecordTemplate> includedAspects);
+
+  /**
+   * Generate default aspects if the entity key aspect is NOT in the database **AND**
+   * the key aspect is being written, present in `includedAspects`.
+   *
+   * Does not automatically create key aspects.
+   * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration
+   *
+   * This version is more efficient in that it only generates additional writes
+   * when a new entity is being minted for the first time. The drawback is that it will not automatically
+   * add key aspects, in case the producer is not bothering to ensure that the entity exists
+   * before writing non-key aspects.
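   *
   * A rough sketch of the intended call pattern (illustrative only; the urn,
   * key aspect name and key aspect value are assumed):
   * <pre>{@code
   * Pair<Boolean, List<Pair<String, RecordTemplate>>> defaults =
   *     entityService.generateDefaultAspectsOnFirstWrite(urn, Map.of(keyAspectName, keyAspect));
   * boolean keyAspectAlreadyExists = defaults.getKey();
   * List<Pair<String, RecordTemplate>> aspectsToWrite = defaults.getValue();
   * }</pre>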
+   *
+   * @param urn entity urn
+   * @param includedAspects aspects being written
+   * @return whether key aspect exists in database and the additional aspects to be written
+   */
+  Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn,
+      Map<String, RecordTemplate> includedAspects);
 
   AspectSpec getKeyAspectSpec(@Nonnull final String entityName);
 
@@ -289,8 +262,16 @@ List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnull fina
   RollbackRunResult rollbackWithConditions(List<AspectRowSummary> aspectRows, Map<String, String> conditions,
       boolean hardDelete);
 
-  IngestProposalResult ingestProposal(@Nonnull MetadataChangeProposal mcp,
-      AuditStamp auditStamp, final boolean async);
+  Set<IngestResult> ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async);
+
+  /**
+   * If you have more than 1 proposal use the {AspectsBatch} method
+   * @param proposal the metadata proposal to ingest
+   * @param auditStamp audit information
+   * @param async async ingestion or sync ingestion
+   * @return ingestion result
+   */
+  IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async);
 
   Boolean exists(Urn urn);
 
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java
new file mode 100644
index 0000000000000..5e4ed6259a7f7
--- /dev/null
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java
@@ -0,0 +1,18 @@
+package com.linkedin.metadata.entity;
+
+import com.linkedin.common.urn.Urn;
+import com.linkedin.metadata.entity.transactions.AbstractBatchItem;
+import lombok.Builder;
+import lombok.Value;
+
+@Builder(toBuilder = true)
+@Value
+public class IngestResult {
+  Urn urn;
+  AbstractBatchItem request;
+  boolean publishedMCL;
+  boolean processedMCL;
+  boolean publishedMCP;
+  boolean sqlCommitted;
+  boolean isUpdate; // update else insert
+}
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java
index a27cb8076721a..1cdd9965c4bfc 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java
@@ -9,6 +9,7 @@
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult;
+import com.linkedin.metadata.entity.transactions.AspectsBatch;
 import com.linkedin.metadata.key.DataHubRetentionKey;
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import com.linkedin.metadata.utils.GenericRecordUtils;
@@ -21,10 +22,11 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
+
+import lombok.Builder;
 import lombok.SneakyThrows;
 import lombok.Value;
 
@@ -65,7 +67,7 @@ public Retention getRetention(@Nonnull String entityName, @Nonnull String aspect
   }
 
   // Get list of datahub retention keys that match the input entity name and aspect name
-  protected List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull String aspectName) {
+  protected static List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull String aspectName) {
     return ImmutableList.of(
         new DataHubRetentionKey().setEntityName(entityName).setAspectName(aspectName),
         new DataHubRetentionKey().setEntityName(entityName).setAspectName(ALL),
@@ -87,12 +89,13 @@ protected List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull String
    */
   @SneakyThrows
   public boolean setRetention(@Nullable String entityName, @Nullable String aspectName,
-      @Nonnull DataHubRetentionConfig retentionConfig) {
+                              @Nonnull DataHubRetentionConfig retentionConfig) {
     validateRetention(retentionConfig.getRetention());
     DataHubRetentionKey retentionKey = new DataHubRetentionKey();
     retentionKey.setEntityName(entityName != null ? entityName : ALL);
     retentionKey.setAspectName(aspectName != null ? aspectName : ALL);
     Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY);
+
     MetadataChangeProposal keyProposal = new MetadataChangeProposal();
     GenericAspect keyAspect = GenericRecordUtils.serializeAspect(retentionKey);
     keyProposal.setAspect(keyAspect);
@@ -100,17 +103,22 @@ public boolean setRetention(@Nullable String entityName, @Nullable String aspect
     keyProposal.setEntityType(Constants.DATAHUB_RETENTION_ENTITY);
     keyProposal.setChangeType(ChangeType.UPSERT);
     keyProposal.setEntityUrn(retentionUrn);
-    AuditStamp auditStamp =
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
-    getEntityService().ingestProposal(keyProposal, auditStamp, false);
+
     MetadataChangeProposal aspectProposal = keyProposal.clone();
     GenericAspect retentionAspect = GenericRecordUtils.serializeAspect(retentionConfig);
     aspectProposal.setAspect(retentionAspect);
     aspectProposal.setAspectName(Constants.DATAHUB_RETENTION_ASPECT);
-    aspectProposal.setChangeType(ChangeType.UPSERT);
-    return getEntityService().ingestProposal(aspectProposal, auditStamp, false).isDidUpdate();
+
+    AuditStamp auditStamp =
+        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
+    AspectsBatch batch = buildAspectsBatch(List.of(keyProposal, aspectProposal));
+
+    return getEntityService().ingestProposal(batch, auditStamp, false).stream()
+        .anyMatch(IngestResult::isSqlCommitted);
   }
 
+  protected abstract AspectsBatch buildAspectsBatch(List<MetadataChangeProposal> mcps);
+
   /**
    * Delete the retention policy set for given entity and aspect.
   *
@@ -140,41 +148,40 @@ private void validateRetention(Retention retention) {
     }
   }
 
-  /**
-   * Apply retention policies given the urn and aspect name asynchronously
-   *
-   * @param urn Urn of the entity
-   * @param aspectName Name of the aspect
-   * @param context Additional context that could be used to apply retention
-   */
-  public void applyRetentionAsync(@Nonnull Urn urn, @Nonnull String aspectName, Optional<RetentionContext> context) {
-    CompletableFuture.runAsync(() -> applyRetention(urn, aspectName, context));
-  }
-
   /**
    * Apply retention policies given the urn and aspect name
    *
-   * @param urn Urn of the entity
-   * @param aspectName Name of the aspect
-   * @param context Additional context that could be used to apply retention
+   * @param retentionContexts urn, aspect name, and additional context that could be used to apply retention
    */
-  public void applyRetention(@Nonnull Urn urn, @Nonnull String aspectName, Optional<RetentionContext> context) {
-    Retention retentionPolicy = getRetention(urn.getEntityType(), aspectName);
-    if (retentionPolicy.data().isEmpty()) {
-      return;
-    }
-    applyRetention(urn, aspectName, retentionPolicy, context);
+  public void applyRetentionWithPolicyDefaults(@Nonnull List<RetentionContext> retentionContexts) {
+    List<RetentionContext> withDefaults = retentionContexts.stream()
+        .map(context -> {
+          if (context.getRetentionPolicy().isEmpty()) {
+            Retention retentionPolicy = getRetention(context.getUrn().getEntityType(), context.getAspectName());
+            return context.toBuilder()
+                .retentionPolicy(Optional.of(retentionPolicy))
+                .build();
+          } else {
+            return context;
+          }
+        })
+        .filter(context -> context.getRetentionPolicy().isPresent()
+            && !context.getRetentionPolicy().get().data().isEmpty())
+        .collect(Collectors.toList());
+
+    applyRetention(withDefaults);
   }
 
   /**
-   * Apply retention policies given the urn and aspect name and policies
-   * @param urn Urn of the entity
-   * @param aspectName Name of the aspect
-   * @param retentionPolicy Retention policies to apply
-   * @param retentionContext Additional context that could be used to apply retention
+   * Apply retention policies given the urn and aspect name and policies. This protected
+   * method assumes that the policy is provided, however we likely need to fetch these
+   * from system configuration.
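   *
   * A minimal sketch of the public entry point (illustrative; the urn and the
   * "datasetProperties" aspect name are assumed):
   * <pre>{@code
   * retentionService.applyRetentionWithPolicyDefaults(List.of(
   *     RetentionService.RetentionContext.builder()
   *         .urn(urn)
   *         .aspectName("datasetProperties")
   *         .build()));
   * }</pre>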
+   *
+   * Users of this should use {@link #applyRetentionWithPolicyDefaults(List)})
+   *
+   * @param retentionContexts Additional context that could be used to apply retention
    */
-  public abstract void applyRetention(@Nonnull Urn urn, @Nonnull String aspectName, Retention retentionPolicy,
-      Optional<RetentionContext> retentionContext);
+  protected abstract void applyRetention(List<RetentionContext> retentionContexts);
 
   /**
    * Batch apply retention to all records that match the input entityName and aspectName
@@ -189,9 +196,16 @@ public abstract void applyRetention(@Nonnull Urn urn, @Nonnull String aspectName
    */
   public abstract BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args);
 
-  @Value
+  @Builder(toBuilder = true)
   public static class RetentionContext {
-    Optional<Long> maxVersion;
+    @Nonnull
+    Urn urn;
+    @Nonnull
+    String aspectName;
+    @Builder.Default
+    Optional<Retention> retentionPolicy = Optional.empty();
+    @Builder.Default
+    Optional<Long> maxVersion = Optional.empty();
   }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java
index 68ecdbd87dd16..06199814d30dd 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java
@@ -3,14 +3,20 @@
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.template.RecordTemplate;
+import com.linkedin.metadata.entity.transactions.AbstractBatchItem;
 import com.linkedin.mxe.MetadataAuditOperation;
 import com.linkedin.mxe.SystemMetadata;
+import lombok.Builder;
 import lombok.Value;
 
+import java.util.concurrent.Future;
+
+@Builder(toBuilder = true)
 @Value
 public class UpdateAspectResult {
   Urn urn;
+  AbstractBatchItem request;
   RecordTemplate oldValue;
   RecordTemplate newValue;
   SystemMetadata oldSystemMetadata;
@@ -18,4 +24,6 @@ public class UpdateAspectResult {
   MetadataAuditOperation operation;
   AuditStamp auditStamp;
   long maxVersion;
+  boolean processedMCL;
+  Future<?> mclFuture;
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java
new file mode 100644
index 0000000000000..03a2b4e2a7f73
--- /dev/null
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java
@@ -0,0 +1,92 @@
+package com.linkedin.metadata.entity.transactions;
+
+import com.linkedin.common.urn.Urn;
+import com.linkedin.events.metadata.ChangeType;
+import com.linkedin.metadata.models.AspectSpec;
+import com.linkedin.metadata.models.EntitySpec;
+import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.models.registry.template.AspectTemplateEngine;
+import com.linkedin.mxe.MetadataChangeProposal;
+import com.linkedin.mxe.SystemMetadata;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
+import static com.linkedin.metadata.Constants.*;
+
+
+public abstract class AbstractBatchItem {
+  // urn an urn associated with the new aspect
+  public abstract Urn getUrn();
+
+  // aspectName name of the aspect being inserted
+  public abstract String getAspectName();
+
+  public abstract SystemMetadata getSystemMetadata();
+
+  public abstract ChangeType getChangeType();
+
+  public abstract EntitySpec getEntitySpec();
+
+  public abstract AspectSpec getAspectSpec();
+
+  public abstract MetadataChangeProposal getMetadataChangeProposal();
+
+  public abstract void validateUrn(EntityRegistry entityRegistry, Urn urn);
+
+  @Nonnull
+  protected static SystemMetadata generateSystemMetadataIfEmpty(@Nullable SystemMetadata systemMetadata) {
+    if (systemMetadata == null) {
+      systemMetadata = new SystemMetadata();
+      systemMetadata.setRunId(DEFAULT_RUN_ID);
+      systemMetadata.setLastObserved(System.currentTimeMillis());
+    }
+    return systemMetadata;
+  }
+
+  protected static AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) {
+    if (!mcp.hasAspectName() || !mcp.hasAspect()) {
+      throw new UnsupportedOperationException("Aspect and aspect name is required for create and update operations");
+    }
+
+    AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName());
+
+    if (aspectSpec == null) {
+      throw new RuntimeException(
+          String.format("Unknown aspect %s for entity %s", mcp.getAspectName(),
+              mcp.getEntityType()));
+    }
+
+    return aspectSpec;
+  }
+
+  /**
+   * Validates that a change type is valid for the given aspect
+   * @param changeType
+   * @param aspectSpec
+   * @return
+   */
+  protected static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) {
+    if (aspectSpec.isTimeseries()) {
+      // Timeseries aspects only support UPSERT
+      return ChangeType.UPSERT.equals(changeType);
+    } else {
+      if (ChangeType.PATCH.equals(changeType)) {
+        return supportsPatch(aspectSpec);
+      } else {
+        return ChangeType.UPSERT.equals(changeType);
+      }
+    }
+  }
+
+  protected static boolean supportsPatch(AspectSpec aspectSpec) {
+    // Limit initial support to defined templates
+    if (!AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName())) {
+      // Prevent unexpected behavior for aspects that do not currently have 1st class patch support,
+      // specifically having array based fields that require merging without specifying merge behavior can get into bad states
+      throw new UnsupportedOperationException("Aspect: " + aspectSpec.getName() + " does not currently support patch "
+          + "operations.");
+    }
+    return true;
+  }
+}
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java
new file mode 100644
index 0000000000000..1d3da08130071
--- /dev/null
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java
@@ -0,0 +1,22 @@
+package com.linkedin.metadata.entity.transactions;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+
+public interface AspectsBatch {
+  List<? extends AbstractBatchItem> getItems();
+
+  default boolean containsDuplicateAspects() {
+    return getItems().stream().map(i -> String.format("%s_%s", i.getClass().getName(), i.hashCode()))
+        .distinct().count() != getItems().size();
+  }
+
+  default Map<String, Set<String>> getUrnAspectsMap() {
+    return getItems().stream()
+        .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName()))
+        .collect(Collectors.groupingBy(Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet())));
+  }
+}
diff --git a/metadata-service/war/build.gradle b/metadata-service/war/build.gradle
index ae207e0260e60..3bd2695c927a7 100644
--- a/metadata-service/war/build.gradle
+++ b/metadata-service/war/build.gradle
@@ -82,7 +82,7 @@ docker {
     load(true)
     push(false)
 }
-tasks.getByName("docker").dependsOn([build, war])
+tasks.getByPath(":metadata-service:war:docker").dependsOn([build, war]) task cleanLocalDockerImages { doLast { diff --git a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java index f43c1f7fd6613..d740fff29e258 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java +++ b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java @@ -57,7 +57,7 @@ public EntityAnnotation getEntityAnnotation() { @Override public String getKeyAspectName() { - return null; + return _name + "Key"; } @Override diff --git a/settings.gradle b/settings.gradle index f326cbf10b96b..270672e929e88 100644 --- a/settings.gradle +++ b/settings.gradle @@ -34,7 +34,6 @@ include ':docker:postgres-setup' include ':docker:kafka-setup' include ':docker:datahub-ingestion' include ':docker:datahub-ingestion-base' -include ':docker:datahub-ingestion-slim' include 'metadata-models' include 'metadata-models-validator' include 'metadata-testing:metadata-models-test-utils' diff --git a/smoke-test/run-quickstart.sh b/smoke-test/run-quickstart.sh index d40e4a5e7a4aa..050b5d2db95c9 100755 --- a/smoke-test/run-quickstart.sh +++ b/smoke-test/run-quickstart.sh @@ -15,4 +15,4 @@ echo "test_user:test_pass" >> ~/.datahub/plugins/frontend/auth/user.props echo "DATAHUB_VERSION = $DATAHUB_VERSION" DATAHUB_TELEMETRY_ENABLED=false \ DOCKER_COMPOSE_BASE="file://$( dirname "$DIR" )" \ -datahub docker quickstart --version ${DATAHUB_VERSION} --standalone_consumers --dump-logs-on-failure --kafka-setup \ No newline at end of file +datahub docker quickstart --version ${DATAHUB_VERSION} --standalone_consumers --dump-logs-on-failure --kafka-setup diff --git a/smoke-test/tests/tags-and-terms/data.json b/smoke-test/tests/tags-and-terms/data.json index d018061796296..349400f099339 100644 --- a/smoke-test/tests/tags-and-terms/data.json +++ b/smoke-test/tests/tags-and-terms/data.json @@ -180,5 +180,44 @@ } }, "proposedDelta": null + }, + { + "auditHeader": null, + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.TagSnapshot": { + "urn": "urn:li:tag:Legacy", + "aspects": [ + { + "com.linkedin.pegasus2avro.tag.TagProperties": { + "name": "Legacy", + "description": "Indicates the dataset is no longer supported" + } + } + ] + } + }, + "proposedDelta": null + }, + { + "auditHeader": null, + "proposedSnapshot": { + "com.linkedin.pegasus2avro.metadata.snapshot.GlossaryTermSnapshot": { + "urn": "urn:li:glossaryTerm:SavingAccount", + "aspects": [ + { + "com.linkedin.pegasus2avro.glossary.GlossaryTermInfo": { + "definition": "a product provided to consumers and businesses by a bank or similar depository institution such as a checking account, savings account, certificate of deposit, debit or pre-paid card, or credit card", + "sourceRef": "FIBO", + "termSource": "EXTERNAL", + "sourceUrl": "https://spec.edmcouncil.org/fibo/ontology/FBC/FunctionalEntities/FinancialServicesEntities/BankingProduct", + "customProperties": { + "FQDN": "SavingAccount" + } + } + } + ] + } + }, + "proposedDelta": null } ] \ No newline at end of file From d30898637d8f803ca5165459c67f56d4b5070278 Mon Sep 17 00:00:00 2001 From: Ne Enot Date: Sun, 3 Sep 2023 04:37:26 +0300 Subject: [PATCH 38/41] fix(frontend): Fix"Logout with OIDC not working" (#8773) --- .../app/controllers/CentralLogoutController.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/datahub-frontend/app/controllers/CentralLogoutController.java 
b/datahub-frontend/app/controllers/CentralLogoutController.java index b1220c7f3f111..5e24fe9f8220c 100644 --- a/datahub-frontend/app/controllers/CentralLogoutController.java +++ b/datahub-frontend/app/controllers/CentralLogoutController.java @@ -16,7 +16,8 @@ */ @Slf4j public class CentralLogoutController extends LogoutController { - private static final String DEFAULT_BASE_URL_PATH = "/login"; + private static final String AUTH_URL_CONFIG_PATH = "/login"; + private static final String DEFAULT_BASE_URL_PATH = "/"; private static Boolean _isOidcEnabled = false; @Inject @@ -36,8 +37,7 @@ public CentralLogoutController(Config config) { public Result executeLogout(Http.Request request) { if (_isOidcEnabled) { try { - return Results.redirect(DEFAULT_BASE_URL_PATH) - .removingFromSession(request); + return logout(request).toCompletableFuture().get().withNewSession(); } catch (Exception e) { log.error("Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", e); return redirect( @@ -47,7 +47,7 @@ public Result executeLogout(Http.Request request) { .withNewSession(); } } - return Results.redirect(DEFAULT_BASE_URL_PATH) + return Results.redirect(AUTH_URL_CONFIG_PATH) .withNewSession(); } } From 801208e1009c5e29701118166f36c013427b88f0 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Mon, 4 Sep 2023 13:37:18 +0900 Subject: [PATCH 39/41] docs:upgrade docusaurus version (#8770) --- docs-website/generateDocsDir.ts | 11 +- docs-website/package.json | 10 +- .../docs/_components/SearchBar/index.jsx | 5 +- docs-website/yarn.lock | 5473 +++++++++-------- 4 files changed, 2771 insertions(+), 2728 deletions(-) diff --git a/docs-website/generateDocsDir.ts b/docs-website/generateDocsDir.ts index ee6e6b586615c..892d02c43fe97 100644 --- a/docs-website/generateDocsDir.ts +++ b/docs-website/generateDocsDir.ts @@ -1,5 +1,5 @@ import { execSync } from "child_process"; -import * as matter from "gray-matter"; +import matter from "gray-matter"; import * as fs from "fs"; import * as path from "path"; import { Octokit } from "@octokit/rest"; @@ -26,7 +26,7 @@ const sidebars_text = fs.readFileSync(SIDEBARS_DEF_PATH).toString(); const MyOctokit = Octokit.plugin(retry).plugin(throttling); const octokit = new MyOctokit({ throttle: { - onRateLimit: (retryAfter, options) => { + onRateLimit: (retryAfter: number, options: any) => { // Retry twice after rate limit is hit. if (options.request.retryCount <= 2) { return true; @@ -157,7 +157,7 @@ function get_slug(filepath: string): string { // There's no need to do this cleanup, but it does make the URLs a bit more aesthetic. 
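  // The `as keyof typeof ...` casts added below are needed because an `in` check
  // does not narrow a plain string to the object's literal key type, so stricter
  // TypeScript would otherwise reject the index access; runtime behavior is unchanged.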
if (filepath in hardcoded_slugs) { - return hardcoded_slugs[filepath]; + return hardcoded_slugs[filepath as keyof typeof hardcoded_slugs]; } let slug = get_id(filepath); @@ -218,9 +218,10 @@ function markdown_guess_title( let title: string; if (filepath in hardcoded_titles) { - title = hardcoded_titles[filepath]; + title = hardcoded_titles[filepath as keyof typeof hardcoded_titles]; if (filepath in hardcoded_descriptions) { - contents.data.description = hardcoded_descriptions[filepath]; + contents.data.description = + hardcoded_descriptions[filepath as keyof typeof hardcoded_descriptions]; } if (hardcoded_hide_title.includes(filepath)) { contents.data.hide_title = true; diff --git a/docs-website/package.json b/docs-website/package.json index f382b5e29905c..6a0f038d19f33 100644 --- a/docs-website/package.json +++ b/docs-website/package.json @@ -22,10 +22,10 @@ }, "dependencies": { "@ant-design/icons": "^4.7.0", - "@docusaurus/core": "^2.1.0", - "@docusaurus/plugin-content-docs": "^2.1.0", - "@docusaurus/plugin-ideal-image": "^2.1.0", - "@docusaurus/preset-classic": "^2.1.0", + "@docusaurus/core": "^2.4.1", + "@docusaurus/plugin-content-docs": "2.4.1", + "@docusaurus/plugin-ideal-image": "^2.4.1", + "@docusaurus/preset-classic": "^2.4.1", "@markprompt/react": "^0.3.5", "@octokit/plugin-retry": "^3.0.9", "@octokit/plugin-throttling": "^3.5.1", @@ -55,7 +55,7 @@ }, "devDependencies": { "prettier": "^2.3.0", - "ts-node": "^9.1.1", + "ts-node": "^10.9.1", "typescript": "^4.1.5" } } diff --git a/docs-website/src/pages/docs/_components/SearchBar/index.jsx b/docs-website/src/pages/docs/_components/SearchBar/index.jsx index 054c041d8a9e5..4f26d758bd5de 100644 --- a/docs-website/src/pages/docs/_components/SearchBar/index.jsx +++ b/docs-website/src/pages/docs/_components/SearchBar/index.jsx @@ -18,7 +18,8 @@ import ExecutionEnvironment from "@docusaurus/ExecutionEnvironment"; import { usePluralForm, useEvent } from "@docusaurus/theme-common"; import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; import { useAllDocsData } from "@docusaurus/plugin-content-docs/client"; -import { useSearchPage } from "@docusaurus/theme-common/internal"; +import { useSearchQueryString } from '@docusaurus/theme-common'; +import {useTitleFormatter} from '@docusaurus/theme-common/internal'; import Translate, { translate } from "@docusaurus/Translate"; import styles from "./search.module.scss"; @@ -103,7 +104,7 @@ function SearchBar() { const documentsFoundPlural = useDocumentsFoundPlural(); const docsSearchVersionsHelpers = useDocsSearchVersionsHelpers(); - const { searchQuery, setSearchQuery } = useSearchPage() + const [searchQuery, setSearchQuery] = useSearchQueryString(); const initialSearchResultState = { items: [], query: null, diff --git a/docs-website/yarn.lock b/docs-website/yarn.lock index ccf0793ec3c76..a93ae9e2bc9c3 100644 --- a/docs-website/yarn.lock +++ b/docs-website/yarn.lock @@ -2,140 +2,148 @@ # yarn lockfile v1 -"@algolia/autocomplete-core@1.7.2": - version "1.7.2" - resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.7.2.tgz#8abbed88082f611997538760dffcb43b33b1fd1d" - integrity sha512-eclwUDC6qfApNnEfu1uWcL/rudQsn59tjEoUYZYE2JSXZrHLRjBUGMxiCoknobU2Pva8ejb0eRxpIYDtVVqdsw== +"@algolia/autocomplete-core@1.9.3": + version "1.9.3" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.9.3.tgz#1d56482a768c33aae0868c8533049e02e8961be7" + integrity 
sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw== + dependencies: + "@algolia/autocomplete-plugin-algolia-insights" "1.9.3" + "@algolia/autocomplete-shared" "1.9.3" + +"@algolia/autocomplete-plugin-algolia-insights@1.9.3": + version "1.9.3" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.9.3.tgz#9b7f8641052c8ead6d66c1623d444cbe19dde587" + integrity sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg== dependencies: - "@algolia/autocomplete-shared" "1.7.2" + "@algolia/autocomplete-shared" "1.9.3" -"@algolia/autocomplete-preset-algolia@1.7.2": - version "1.7.2" - resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.2.tgz#9cd4f64b3d64399657ee2dc2b7e0a939e0713a26" - integrity sha512-+RYEG6B0QiGGfRb2G3MtPfyrl0dALF3cQNTWBzBX6p5o01vCCGTTinAm2UKG3tfc2CnOMAtnPLkzNZyJUpnVJw== +"@algolia/autocomplete-preset-algolia@1.9.3": + version "1.9.3" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.9.3.tgz#64cca4a4304cfcad2cf730e83067e0c1b2f485da" + integrity sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA== dependencies: - "@algolia/autocomplete-shared" "1.7.2" + "@algolia/autocomplete-shared" "1.9.3" -"@algolia/autocomplete-shared@1.7.2": - version "1.7.2" - resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.2.tgz#daa23280e78d3b42ae9564d12470ae034db51a89" - integrity sha512-QCckjiC7xXHIUaIL3ektBtjJ0w7tTA3iqKcAE/Hjn1lZ5omp7i3Y4e09rAr9ZybqirL7AbxCLLq0Ra5DDPKeug== +"@algolia/autocomplete-shared@1.9.3": + version "1.9.3" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.9.3.tgz#2e22e830d36f0a9cf2c0ccd3c7f6d59435b77dfa" + integrity sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ== -"@algolia/cache-browser-local-storage@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.14.3.tgz#b9e0da012b2f124f785134a4d468ee0841b2399d" - integrity sha512-hWH1yCxgG3+R/xZIscmUrWAIBnmBFHH5j30fY/+aPkEZWt90wYILfAHIOZ1/Wxhho5SkPfwFmT7ooX2d9JeQBw== +"@algolia/cache-browser-local-storage@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.19.1.tgz#d29f42775ed4d117182897ac164519c593faf399" + integrity sha512-FYAZWcGsFTTaSAwj9Std8UML3Bu8dyWDncM7Ls8g+58UOe4XYdlgzXWbrIgjaguP63pCCbMoExKr61B+ztK3tw== dependencies: - "@algolia/cache-common" "4.14.3" + "@algolia/cache-common" "4.19.1" -"@algolia/cache-common@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.14.3.tgz#a78e9faee3dfec018eab7b0996e918e06b476ac7" - integrity sha512-oZJofOoD9FQOwiGTzyRnmzvh3ZP8WVTNPBLH5xU5JNF7drDbRT0ocVT0h/xB2rPHYzOeXRrLaQQBwRT/CKom0Q== +"@algolia/cache-common@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.19.1.tgz#faa5eeacaffd6023c2cf26e9866bdb06193f9b26" + integrity sha512-XGghi3l0qA38HiqdoUY+wvGyBsGvKZ6U3vTiMBT4hArhP3fOGLXpIINgMiiGjTe4FVlTa5a/7Zf2bwlIHfRqqg== -"@algolia/cache-in-memory@4.14.3": - version "4.14.3" - resolved 
"https://registry.yarnpkg.com/@algolia/cache-in-memory/-/cache-in-memory-4.14.3.tgz#96cefb942aeb80e51e6a7e29f25f4f7f3439b736" - integrity sha512-ES0hHQnzWjeioLQf5Nq+x1AWdZJ50znNPSH3puB/Y4Xsg4Av1bvLmTJe7SY2uqONaeMTvL0OaVcoVtQgJVw0vg== +"@algolia/cache-in-memory@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/cache-in-memory/-/cache-in-memory-4.19.1.tgz#afe4f0f21149800358379871089e0141fb72415b" + integrity sha512-+PDWL+XALGvIginigzu8oU6eWw+o76Z8zHbBovWYcrtWOEtinbl7a7UTt3x3lthv+wNuFr/YD1Gf+B+A9V8n5w== dependencies: - "@algolia/cache-common" "4.14.3" + "@algolia/cache-common" "4.19.1" -"@algolia/client-account@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/client-account/-/client-account-4.14.3.tgz#6d7d032a65c600339ce066505c77013d9a9e4966" - integrity sha512-PBcPb0+f5Xbh5UfLZNx2Ow589OdP8WYjB4CnvupfYBrl9JyC1sdH4jcq/ri8osO/mCZYjZrQsKAPIqW/gQmizQ== +"@algolia/client-account@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/client-account/-/client-account-4.19.1.tgz#1fa65881baab79ad35af6bcf44646a13b8d5edc9" + integrity sha512-Oy0ritA2k7AMxQ2JwNpfaEcgXEDgeyKu0V7E7xt/ZJRdXfEpZcwp9TOg4TJHC7Ia62gIeT2Y/ynzsxccPw92GA== dependencies: - "@algolia/client-common" "4.14.3" - "@algolia/client-search" "4.14.3" - "@algolia/transporter" "4.14.3" + "@algolia/client-common" "4.19.1" + "@algolia/client-search" "4.19.1" + "@algolia/transporter" "4.19.1" -"@algolia/client-analytics@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-4.14.3.tgz#ca409d00a8fff98fdcc215dc96731039900055dc" - integrity sha512-eAwQq0Hb/aauv9NhCH5Dp3Nm29oFx28sayFN2fdOWemwSeJHIl7TmcsxVlRsO50fsD8CtPcDhtGeD3AIFLNvqw== +"@algolia/client-analytics@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-4.19.1.tgz#e6ed79acd4de5a0284c9696bf4e1c25278ba34db" + integrity sha512-5QCq2zmgdZLIQhHqwl55ZvKVpLM3DNWjFI4T+bHr3rGu23ew2bLO4YtyxaZeChmDb85jUdPDouDlCumGfk6wOg== dependencies: - "@algolia/client-common" "4.14.3" - "@algolia/client-search" "4.14.3" - "@algolia/requester-common" "4.14.3" - "@algolia/transporter" "4.14.3" + "@algolia/client-common" "4.19.1" + "@algolia/client-search" "4.19.1" + "@algolia/requester-common" "4.19.1" + "@algolia/transporter" "4.19.1" -"@algolia/client-common@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-4.14.3.tgz#c44e48652b2121a20d7a40cfd68d095ebb4191a8" - integrity sha512-jkPPDZdi63IK64Yg4WccdCsAP4pHxSkr4usplkUZM5C1l1oEpZXsy2c579LQ0rvwCs5JFmwfNG4ahOszidfWPw== +"@algolia/client-common@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-4.19.1.tgz#40a8387316fa61d62ad1091beb3a8e227f008e75" + integrity sha512-3kAIVqTcPrjfS389KQvKzliC559x+BDRxtWamVJt8IVp7LGnjq+aVAXg4Xogkur1MUrScTZ59/AaUd5EdpyXgA== dependencies: - "@algolia/requester-common" "4.14.3" - "@algolia/transporter" "4.14.3" + "@algolia/requester-common" "4.19.1" + "@algolia/transporter" "4.19.1" -"@algolia/client-personalization@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/client-personalization/-/client-personalization-4.14.3.tgz#8f71325035aa2a5fa7d1d567575235cf1d6c654f" - integrity sha512-UCX1MtkVNgaOL9f0e22x6tC9e2H3unZQlSUdnVaSKpZ+hdSChXGaRjp2UIT7pxmPqNCyv51F597KEX5WT60jNg== +"@algolia/client-personalization@4.19.1": + version "4.19.1" + resolved 
"https://registry.yarnpkg.com/@algolia/client-personalization/-/client-personalization-4.19.1.tgz#fe362e0684dc74c3504c3641c5a7488c6ae02e07" + integrity sha512-8CWz4/H5FA+krm9HMw2HUQenizC/DxUtsI5oYC0Jxxyce1vsr8cb1aEiSJArQT6IzMynrERif1RVWLac1m36xw== dependencies: - "@algolia/client-common" "4.14.3" - "@algolia/requester-common" "4.14.3" - "@algolia/transporter" "4.14.3" + "@algolia/client-common" "4.19.1" + "@algolia/requester-common" "4.19.1" + "@algolia/transporter" "4.19.1" -"@algolia/client-search@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.14.3.tgz#cf1e77549f5c3e73408ffe6441ede985fde69da0" - integrity sha512-I2U7xBx5OPFdPLA8AXKUPPxGY3HDxZ4r7+mlZ8ZpLbI8/ri6fnu6B4z3wcL7sgHhDYMwnAE8Xr0AB0h3Hnkp4A== +"@algolia/client-search@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.19.1.tgz#5e54601aa5f5cea790cec3f2cde4af9d6403871e" + integrity sha512-mBecfMFS4N+yK/p0ZbK53vrZbL6OtWMk8YmnOv1i0LXx4pelY8TFhqKoTit3NPVPwoSNN0vdSN9dTu1xr1XOVw== dependencies: - "@algolia/client-common" "4.14.3" - "@algolia/requester-common" "4.14.3" - "@algolia/transporter" "4.14.3" + "@algolia/client-common" "4.19.1" + "@algolia/requester-common" "4.19.1" + "@algolia/transporter" "4.19.1" "@algolia/events@^4.0.1": version "4.0.1" resolved "https://registry.yarnpkg.com/@algolia/events/-/events-4.0.1.tgz#fd39e7477e7bc703d7f893b556f676c032af3950" integrity sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ== -"@algolia/logger-common@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.14.3.tgz#87d4725e7f56ea5a39b605771b7149fff62032a7" - integrity sha512-kUEAZaBt/J3RjYi8MEBT2QEexJR2kAE2mtLmezsmqMQZTV502TkHCxYzTwY2dE7OKcUTxi4OFlMuS4GId9CWPw== +"@algolia/logger-common@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.19.1.tgz#0e46a11510f3e94e1afc0ac780ae52e9597be78f" + integrity sha512-i6pLPZW/+/YXKis8gpmSiNk1lOmYCmRI6+x6d2Qk1OdfvX051nRVdalRbEcVTpSQX6FQAoyeaui0cUfLYW5Elw== -"@algolia/logger-console@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/logger-console/-/logger-console-4.14.3.tgz#1f19f8f0a5ef11f01d1f9545290eb6a89b71fb8a" - integrity sha512-ZWqAlUITktiMN2EiFpQIFCJS10N96A++yrexqC2Z+3hgF/JcKrOxOdT4nSCQoEPvU4Ki9QKbpzbebRDemZt/hw== +"@algolia/logger-console@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/logger-console/-/logger-console-4.19.1.tgz#656a6f4ebb5de39af6ef7095c398d9ab3cceb87d" + integrity sha512-jj72k9GKb9W0c7TyC3cuZtTr0CngLBLmc8trzZlXdfvQiigpUdvTi1KoWIb2ZMcRBG7Tl8hSb81zEY3zI2RlXg== dependencies: - "@algolia/logger-common" "4.14.3" + "@algolia/logger-common" "4.19.1" -"@algolia/requester-browser-xhr@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.14.3.tgz#bcf55cba20f58fd9bc95ee55793b5219f3ce8888" - integrity sha512-AZeg2T08WLUPvDncl2XLX2O67W5wIO8MNaT7z5ii5LgBTuk/rU4CikTjCe2xsUleIZeFl++QrPAi4Bdxws6r/Q== +"@algolia/requester-browser-xhr@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.19.1.tgz#7341ea2f980b8980a2976110142026721e452187" + integrity sha512-09K/+t7lptsweRTueHnSnmPqIxbHMowejAkn9XIcJMLdseS3zl8ObnS5GWea86mu3vy4+8H+ZBKkUN82Zsq/zg== dependencies: - "@algolia/requester-common" "4.14.3" + "@algolia/requester-common" 
"4.19.1" -"@algolia/requester-common@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.14.3.tgz#2d02fbe01afb7ae5651ae8dfe62d6c089f103714" - integrity sha512-RrRzqNyKFDP7IkTuV3XvYGF9cDPn9h6qEDl595lXva3YUk9YSS8+MGZnnkOMHvjkrSCKfoLeLbm/T4tmoIeclw== +"@algolia/requester-common@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.19.1.tgz#f3396c77631b9d36e8d4d6f819a2c27f9ddbf7a1" + integrity sha512-BisRkcWVxrDzF1YPhAckmi2CFYK+jdMT60q10d7z3PX+w6fPPukxHRnZwooiTUrzFe50UBmLItGizWHP5bDzVQ== -"@algolia/requester-node-http@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-4.14.3.tgz#72389e1c2e5d964702451e75e368eefe85a09d8f" - integrity sha512-O5wnPxtDRPuW2U0EaOz9rMMWdlhwP0J0eSL1Z7TtXF8xnUeeUyNJrdhV5uy2CAp6RbhM1VuC3sOJcIR6Av+vbA== +"@algolia/requester-node-http@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-4.19.1.tgz#ea210de9642628b3bdda1dd7fcd1fcb686da442e" + integrity sha512-6DK52DHviBHTG2BK/Vv2GIlEw7i+vxm7ypZW0Z7vybGCNDeWzADx+/TmxjkES2h15+FZOqVf/Ja677gePsVItA== dependencies: - "@algolia/requester-common" "4.14.3" + "@algolia/requester-common" "4.19.1" -"@algolia/transporter@4.14.3": - version "4.14.3" - resolved "https://registry.yarnpkg.com/@algolia/transporter/-/transporter-4.14.3.tgz#5593036bd9cf2adfd077fdc3e81d2e6118660a7a" - integrity sha512-2qlKlKsnGJ008exFRb5RTeTOqhLZj0bkMCMVskxoqWejs2Q2QtWmsiH98hDfpw0fmnyhzHEt0Z7lqxBYp8bW2w== +"@algolia/transporter@4.19.1": + version "4.19.1" + resolved "https://registry.yarnpkg.com/@algolia/transporter/-/transporter-4.19.1.tgz#b5787299740c4bec9ba05502d98c14b5999860c8" + integrity sha512-nkpvPWbpuzxo1flEYqNIbGz7xhfhGOKGAZS7tzC+TELgEmi7z99qRyTfNSUlW7LZmB3ACdnqAo+9A9KFBENviQ== dependencies: - "@algolia/cache-common" "4.14.3" - "@algolia/logger-common" "4.14.3" - "@algolia/requester-common" "4.14.3" + "@algolia/cache-common" "4.19.1" + "@algolia/logger-common" "4.19.1" + "@algolia/requester-common" "4.19.1" -"@ampproject/remapping@^2.1.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" - integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== +"@ampproject/remapping@^2.2.0": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== dependencies: - "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" "@ant-design/colors@^6.0.0": @@ -145,46 +153,65 @@ dependencies: "@ctrl/tinycolor" "^3.4.0" -"@ant-design/cssinjs@^1.0.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@ant-design/cssinjs/-/cssinjs-1.2.0.tgz#3ba7d79617e0aa617823d7ba3f5d30072a8ab870" - integrity sha512-TjZVH4lgc7jAhYn9yStjnQI7u93b+VAg2oXIyzy12XjnFjCTR+nQIHfuixK+Vm0sx2L8ecDOBDEPT9ldCA7RPQ== +"@ant-design/colors@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@ant-design/colors/-/colors-7.0.0.tgz#eb7eecead124c3533aea05d61254f0a17f2b61b3" + integrity sha512-iVm/9PfGCbC0dSMBrz7oiEXZaaGH7ceU40OJEfKmyuzR9R5CRimJYPlRiFtMQGQcbNMea/ePcoIebi4ASGYXtg== + dependencies: + "@ctrl/tinycolor" "^3.4.0" + 
+"@ant-design/cssinjs@^1.16.0": + version "1.17.0" + resolved "https://registry.yarnpkg.com/@ant-design/cssinjs/-/cssinjs-1.17.0.tgz#a3f69cf5131539b76ccdbfced43d242557599fea" + integrity sha512-MgGCZ6sfD3yQB0XW0hN4jgixMxApTlDYyct+pc7fRZNO4CaqWWm/9iXkkljNR27lyWLZmm+XiDfcIOo1bnrnMA== dependencies: "@babel/runtime" "^7.11.1" "@emotion/hash" "^0.8.0" "@emotion/unitless" "^0.7.5" classnames "^2.3.1" csstype "^3.0.10" - rc-util "^5.24.2" - shallowequal "^1.1.0" + rc-util "^5.35.0" stylis "^4.0.13" -"@ant-design/icons-svg@^4.2.1": - version "4.2.1" - resolved "https://registry.yarnpkg.com/@ant-design/icons-svg/-/icons-svg-4.2.1.tgz#8630da8eb4471a4aabdaed7d1ff6a97dcb2cf05a" - integrity sha512-EB0iwlKDGpG93hW8f85CTJTs4SvMX7tt5ceupvhALp1IF44SeUFOMhKUOYqpsoYWQKAOuTRDMqn75rEaKDp0Xw== +"@ant-design/icons-svg@^4.3.0": + version "4.3.1" + resolved "https://registry.yarnpkg.com/@ant-design/icons-svg/-/icons-svg-4.3.1.tgz#4b2f65a17d4d32b526baa6414aca2117382bf8da" + integrity sha512-4QBZg8ccyC6LPIRii7A0bZUk3+lEDCLnhB+FVsflGdcWPPmV+j3fire4AwwoqHV/BibgvBmR9ZIo4s867smv+g== "@ant-design/icons@^4.7.0": - version "4.7.0" - resolved "https://registry.yarnpkg.com/@ant-design/icons/-/icons-4.7.0.tgz#8c3cbe0a556ba92af5dc7d1e70c0b25b5179af0f" - integrity sha512-aoB4Z7JA431rt6d4u+8xcNPPCrdufSRMUOpxa1ab6mz1JCQZOEVolj2WVs/tDFmN62zzK30mNelEsprLYsSF3g== + version "4.8.1" + resolved "https://registry.yarnpkg.com/@ant-design/icons/-/icons-4.8.1.tgz#44f6c81f609811d68d48a123eb5dcc477f8fbcb7" + integrity sha512-JRAuiqllnMsiZIO8OvBOeFconprC3cnMpJ9MvXrHh+H5co9rlg8/aSHQfLf5jKKe18lUgRaIwC2pz8YxH9VuCA== dependencies: "@ant-design/colors" "^6.0.0" - "@ant-design/icons-svg" "^4.2.1" + "@ant-design/icons-svg" "^4.3.0" "@babel/runtime" "^7.11.2" classnames "^2.2.6" + lodash "^4.17.15" rc-util "^5.9.4" -"@ant-design/react-slick@~0.29.1": - version "0.29.2" - resolved "https://registry.yarnpkg.com/@ant-design/react-slick/-/react-slick-0.29.2.tgz#53e6a7920ea3562eebb304c15a7fc2d7e619d29c" - integrity sha512-kgjtKmkGHa19FW21lHnAfyyH9AAoh35pBdcJ53rHmQ3O+cfFHGHnUbj/HFrRNJ5vIts09FKJVAD8RpaC+RaWfA== +"@ant-design/icons@^5.2.2": + version "5.2.5" + resolved "https://registry.yarnpkg.com/@ant-design/icons/-/icons-5.2.5.tgz#852474359e271a36e54a4ac115065fae7396277e" + integrity sha512-9Jc59v5fl5dzmxqLWtRev3dJwU7Ya9ZheoI6XmZjZiQ7PRtk77rC+Rbt7GJzAPPg43RQ4YO53RE1u8n+Et97vQ== + dependencies: + "@ant-design/colors" "^7.0.0" + "@ant-design/icons-svg" "^4.3.0" + "@babel/runtime" "^7.11.2" + classnames "^2.2.6" + lodash.camelcase "^4.3.0" + rc-util "^5.31.1" + +"@ant-design/react-slick@~1.0.0": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@ant-design/react-slick/-/react-slick-1.0.2.tgz#241bb412aeacf7ff5d50c61fa5db66773fde6b56" + integrity sha512-Wj8onxL/T8KQLFFiCA4t8eIRGpRR+UPgOdac2sYzonv+i0n3kXHmvHLLiOYL655DQx2Umii9Y9nNgL7ssu5haQ== dependencies: "@babel/runtime" "^7.10.4" classnames "^2.2.5" json2mq "^0.2.0" - lodash "^4.17.21" resize-observer-polyfill "^1.5.1" + throttle-debounce "^5.0.0" "@ardatan/aggregate-error@0.0.6": version "0.0.6" @@ -193,17 +220,18 @@ dependencies: tslib "~2.0.1" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" - integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", 
"@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.10", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": + version "7.22.13" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" + integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== dependencies: - "@babel/highlight" "^7.18.6" + "@babel/highlight" "^7.22.13" + chalk "^2.4.2" -"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.1", "@babel/compat-data@^7.20.5": - version "7.20.10" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.20.10.tgz#9d92fa81b87542fff50e848ed585b4212c1d34ec" - integrity sha512-sEnuDPpOJR/fcafHMjpcpGN5M2jbUGUHwmuWKM/YdPzeEDJg8bgmbcWQFUfE32MQjti1koACvoPVsDe8Uq+idg== +"@babel/compat-data@^7.22.6", "@babel/compat-data@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730" + integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ== "@babel/core@7.12.9": version "7.12.9" @@ -227,369 +255,260 @@ semver "^5.4.1" source-map "^0.5.0" -"@babel/core@^7.18.6": - version "7.18.13" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.13.tgz#9be8c44512751b05094a4d3ab05fc53a47ce00ac" - integrity sha512-ZisbOvRRusFktksHSG6pjj1CSvkPkcZq/KHD45LAkVP/oiHJkNBZWfpvlLmX8OtHDG8IuzsFlVRWo08w7Qxn0A== - dependencies: - "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.18.13" - "@babel/helper-compilation-targets" "^7.18.9" - "@babel/helper-module-transforms" "^7.18.9" - "@babel/helpers" "^7.18.9" - "@babel/parser" "^7.18.13" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.18.13" - "@babel/types" "^7.18.13" +"@babel/core@^7.18.6", "@babel/core@^7.19.6": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.11.tgz#8033acaa2aa24c3f814edaaa057f3ce0ba559c24" + integrity sha512-lh7RJrtPdhibbxndr6/xx0w8+CVlY5FJZiaSz908Fpy+G0xkBFTvwLcKJFF4PJxVfGhVWNebikpWGnOoC71juQ== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.22.10" + "@babel/generator" "^7.22.10" + "@babel/helper-compilation-targets" "^7.22.10" + "@babel/helper-module-transforms" "^7.22.9" + "@babel/helpers" "^7.22.11" + "@babel/parser" "^7.22.11" + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.11" + "@babel/types" "^7.22.11" convert-source-map "^1.7.0" debug "^4.1.0" gensync "^1.0.0-beta.2" - json5 "^2.2.1" - semver "^6.3.0" - -"@babel/core@^7.19.6": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.20.7.tgz#37072f951bd4d28315445f66e0ec9f6ae0c8c35f" - integrity sha512-t1ZjCluspe5DW24bn2Rr1CDb2v9rn/hROtg9a2tmd0+QYf4bsloYfLQzjG4qHPNMhWtKdGC33R5AxGR2Af2cBw== - dependencies: - "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.20.7" - "@babel/helper-compilation-targets" "^7.20.7" - "@babel/helper-module-transforms" "^7.20.7" - "@babel/helpers" "^7.20.7" - "@babel/parser" "^7.20.7" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.7" - "@babel/types" "^7.20.7" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.1" - semver "^6.3.0" + json5 "^2.2.3" + semver "^6.3.1" -"@babel/generator@^7.12.5", "@babel/generator@^7.18.13", "@babel/generator@^7.18.7", "@babel/generator@^7.20.7": - version "7.20.7" - resolved 
"https://registry.yarnpkg.com/@babel/generator/-/generator-7.20.7.tgz#f8ef57c8242665c5929fe2e8d82ba75460187b4a" - integrity sha512-7wqMOJq8doJMZmP4ApXTzLxSr7+oO2jroJURrVEp6XShrQUObV8Tq/D0NCcoYg2uHqUrjzO0zwBjoYzelxK+sw== +"@babel/generator@^7.12.5", "@babel/generator@^7.18.7", "@babel/generator@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.10.tgz#c92254361f398e160645ac58831069707382b722" + integrity sha512-79KIf7YiWjjdZ81JnLujDRApWtl7BxTqWD88+FFdQEIOG8LJ0etDOM7CXuIgGJa55sGOwZVwuEsaLEm0PJ5/+A== dependencies: - "@babel/types" "^7.20.7" + "@babel/types" "^7.22.10" "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/helper-annotate-as-pure@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" - integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== +"@babel/helper-annotate-as-pure@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" + integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" -"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" - integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== +"@babel/helper-builder-binary-assignment-operator-visitor@^7.22.5": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.10.tgz#573e735937e99ea75ea30788b57eb52fab7468c9" + integrity sha512-Av0qubwDQxC56DoUReVDeLfMEjYYSN1nZrTUrWkXd7hpU73ymRANkbuDm3yni9npkn+RXy9nNbEJZEzXr7xrfQ== dependencies: - "@babel/helper-explode-assignable-expression" "^7.18.6" - "@babel/types" "^7.18.9" + "@babel/types" "^7.22.10" -"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.20.0", "@babel/helper-compilation-targets@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz#a6cd33e93629f5eb473b021aac05df62c4cd09bb" - integrity sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ== +"@babel/helper-compilation-targets@^7.22.10", "@babel/helper-compilation-targets@^7.22.5", "@babel/helper-compilation-targets@^7.22.6": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.10.tgz#01d648bbc25dd88f513d862ee0df27b7d4e67024" + integrity sha512-JMSwHD4J7SLod0idLq5PKgI+6g/hLD/iuWBq08ZX49xE14VpVEojJ5rHWptpirV2j020MvypRLAXAO50igCJ5Q== dependencies: - "@babel/compat-data" "^7.20.5" - "@babel/helper-validator-option" "^7.18.6" - browserslist "^4.21.3" + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.5" + browserslist "^4.21.9" lru-cache "^5.1.1" - semver "^6.3.0" - 
-"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.20.5", "@babel/helper-create-class-features-plugin@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.20.7.tgz#d0e1f8d7e4ed5dac0389364d9c0c191d948ade6f" - integrity sha512-LtoWbDXOaidEf50hmdDqn9g8VEzsorMexoWMQdQODbvmqYmaF23pBP5VNPAGIFHsFQCIeKokDiz3CH5Y2jlY6w== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-member-expression-to-functions" "^7.20.7" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-replace-supers" "^7.20.7" - "@babel/helper-split-export-declaration" "^7.18.6" - -"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.20.5": - version "7.20.5" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.20.5.tgz#5ea79b59962a09ec2acf20a963a01ab4d076ccca" - integrity sha512-m68B1lkg3XDGX5yCvGO0kPx3v9WIYLnzjKfPcQiwntEQa5ZeRkPmo2X/ISJc8qxWGfwUr+kvZAeEzAwLec2r2w== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - regexpu-core "^5.2.1" - -"@babel/helper-define-polyfill-provider@^0.3.2", "@babel/helper-define-polyfill-provider@^0.3.3": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" - integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== - dependencies: - "@babel/helper-compilation-targets" "^7.17.7" - "@babel/helper-plugin-utils" "^7.16.7" + semver "^6.3.1" + +"@babel/helper-create-class-features-plugin@^7.22.11", "@babel/helper-create-class-features-plugin@^7.22.5": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.11.tgz#4078686740459eeb4af3494a273ac09148dfb213" + integrity sha512-y1grdYL4WzmUDBRGK0pDbIoFd7UZKoDurDzWEoNMYoj1EL+foGRQNyPWDcC+YyegN5y1DUsFFmzjGijB3nSVAQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + "@babel/helper-member-expression-to-functions" "^7.22.5" + "@babel/helper-optimise-call-expression" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + semver "^6.3.1" + +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.5": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.9.tgz#9d8e61a8d9366fe66198f57c40565663de0825f6" + integrity sha512-+svjVa/tFwsNSG4NEy1h85+HQ5imbT92Q5/bgtS7P0GTQlP8WuFdqsiABmQouhiFGyV66oGxZFpeYHza1rNsKw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + regexpu-core "^5.3.1" + semver "^6.3.1" + +"@babel/helper-define-polyfill-provider@^0.4.2": + version "0.4.2" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.2.tgz#82c825cadeeeee7aad237618ebbe8fa1710015d7" + integrity 
sha512-k0qnnOqHn5dK9pZpfD5XXZ9SojAITdCKRn2Lp6rnDGzIbaP0rHyMPk/4wsSxVBVz4RfN0q6VpXWP2pDGIoQ7hw== + dependencies: + "@babel/helper-compilation-targets" "^7.22.6" + "@babel/helper-plugin-utils" "^7.22.5" debug "^4.1.1" lodash.debounce "^4.0.8" resolve "^1.14.2" - semver "^6.1.2" -"@babel/helper-environment-visitor@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" - integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== +"@babel/helper-environment-visitor@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" + integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== -"@babel/helper-explode-assignable-expression@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" - integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== +"@babel/helper-function-name@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz#ede300828905bb15e582c037162f99d5183af1be" + integrity sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ== dependencies: - "@babel/types" "^7.18.6" + "@babel/template" "^7.22.5" + "@babel/types" "^7.22.5" -"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0": - version "7.19.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" - integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: - "@babel/template" "^7.18.10" - "@babel/types" "^7.19.0" + "@babel/types" "^7.22.5" -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== +"@babel/helper-member-expression-to-functions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.22.5.tgz#0a7c56117cad3372fbf8d2fb4bf8f8d64a1e76b2" + integrity sha512-aBiH1NKMG0H2cGZqspNvsaBe6wNGjbJjuLy29aU+eDZjSbbN53BaxlpB02xm9v34pLTZ1nIQPFYn2qMZoa5BQQ== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" -"@babel/helper-member-expression-to-functions@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.20.7.tgz#a6f26e919582275a93c3aa6594756d71b0bb7f05" - integrity 
sha512-9J0CxJLq315fEdi4s7xK5TQaNYjZw+nDVpVqr1axNGKzdrdwYBD5b4uKv3n75aABG0rCCTK8Im8Ww7eYfMrZgw== +"@babel/helper-module-imports@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" + integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== dependencies: - "@babel/types" "^7.20.7" + "@babel/types" "^7.22.5" -"@babel/helper-module-imports@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" - integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.18.9", "@babel/helper-module-transforms@^7.20.11", "@babel/helper-module-transforms@^7.20.7": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.20.11.tgz#df4c7af713c557938c50ea3ad0117a7944b2f1b0" - integrity sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-simple-access" "^7.20.2" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/helper-validator-identifier" "^7.19.1" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.10" - "@babel/types" "^7.20.7" - -"@babel/helper-optimise-call-expression@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" - integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== +"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.22.5", "@babel/helper-module-transforms@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.9.tgz#92dfcb1fbbb2bc62529024f72d942a8c97142129" + integrity sha512-t+WA2Xn5K+rTeGtC8jCsdAH52bjggG5TKRuRrAGNM/mjIbO4GxvlLMFOEz9wXY5I2XQ60PMFsAG2WIcG82dQMQ== + dependencies: + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.5" + +"@babel/helper-optimise-call-expression@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" + integrity sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" -"@babel/helper-plugin-utils@7.10.4", "@babel/helper-plugin-utils@^7.10.4": +"@babel/helper-plugin-utils@7.10.4": version "7.10.4" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz#2f75a831269d4f677de49986dff59927533cf375" integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.12.13", 
"@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz#d1b9000752b18d0877cff85a5c376ce5c3121629" - integrity sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ== - -"@babel/helper-remap-async-to-generator@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" - integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-wrap-function" "^7.18.9" - "@babel/types" "^7.18.9" - -"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.20.7.tgz#243ecd2724d2071532b2c8ad2f0f9f083bcae331" - integrity sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-member-expression-to-functions" "^7.20.7" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/helper-simple-access@^7.20.2": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz#0ab452687fe0c2cfb1e2b9e0015de07fc2d62dd9" - integrity sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA== - dependencies: - "@babel/types" "^7.20.2" - -"@babel/helper-skip-transparent-expression-wrappers@^7.20.0": - version "7.20.0" - resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.20.0.tgz#fbe4c52f60518cab8140d77101f0e63a8a230684" - integrity sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg== - dependencies: - "@babel/types" "^7.20.0" - -"@babel/helper-split-export-declaration@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" - integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-string-parser@^7.19.4": - version "7.19.4" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" - integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== - -"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": - version "7.19.1" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" - integrity 
sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== - -"@babel/helper-validator-option@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" - integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== - -"@babel/helper-wrap-function@^7.18.9": - version "7.20.5" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.20.5.tgz#75e2d84d499a0ab3b31c33bcfe59d6b8a45f62e3" - integrity sha512-bYMxIWK5mh+TgXGVqAtnu5Yn1un+v8DDZtqyzKRLUzrh70Eal2O3aZ7aPYiMADO4uKlkzOiRiZ6GX5q3qxvW9Q== - dependencies: - "@babel/helper-function-name" "^7.19.0" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.20.5" - "@babel/types" "^7.20.5" - -"@babel/helpers@^7.12.5", "@babel/helpers@^7.18.9", "@babel/helpers@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.20.7.tgz#04502ff0feecc9f20ecfaad120a18f011a8e6dce" - integrity sha512-PBPjs5BppzsGaxHQCDKnZ6Gd9s6xl8bBCluz3vEInLGRJmnZan4F6BYCeqtyXqkk4W5IlPmjK4JlOuZkpJ3xZA== - dependencies: - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/highlight@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" - integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== - dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - chalk "^2.0.0" +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== + +"@babel/helper-remap-async-to-generator@^7.22.5", "@babel/helper-remap-async-to-generator@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.9.tgz#53a25b7484e722d7efb9c350c75c032d4628de82" + integrity sha512-8WWC4oR4Px+tr+Fp0X3RHDVfINGpF3ad1HIbrc8A77epiR6eMMc6jsgozkzT2uDiOOdoS9cLIQ+XD2XvI2WSmQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-wrap-function" "^7.22.9" + +"@babel/helper-replace-supers@^7.22.5", "@babel/helper-replace-supers@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.9.tgz#cbdc27d6d8d18cd22c81ae4293765a5d9afd0779" + integrity sha512-LJIKvvpgPOPUThdYqcX6IXRuIcTkcAub0IaDRGCZH0p5GPUp7PhRU9QVgFcDDd51BaPkk77ZjqFwh6DZTAEmGg== + dependencies: + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-member-expression-to-functions" "^7.22.5" + "@babel/helper-optimise-call-expression" "^7.22.5" + +"@babel/helper-simple-access@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + integrity 
sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-skip-transparent-expression-wrappers@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847" + integrity sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-string-parser@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== + +"@babel/helper-validator-identifier@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" + integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== + +"@babel/helper-validator-option@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" + integrity sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw== + +"@babel/helper-wrap-function@^7.22.9": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.10.tgz#d845e043880ed0b8c18bd194a12005cb16d2f614" + integrity sha512-OnMhjWjuGYtdoO3FmsEFWvBStBAe2QOgwOLsLNDjN+aaiMD8InJk1/O3HSD8lkqTjCgg5YI34Tz15KNNA3p+nQ== + dependencies: + "@babel/helper-function-name" "^7.22.5" + "@babel/template" "^7.22.5" + "@babel/types" "^7.22.10" + +"@babel/helpers@^7.12.5", "@babel/helpers@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.11.tgz#b02f5d5f2d7abc21ab59eeed80de410ba70b056a" + integrity sha512-vyOXC8PBWaGc5h7GMsNx68OH33cypkEDJCHvYVVgVbbxJDROYVtexSk0gK5iCF1xNjRIN2s8ai7hwkWDq5szWg== + dependencies: + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.11" + "@babel/types" "^7.22.11" + +"@babel/highlight@^7.22.13": + version "7.22.13" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.13.tgz#9cda839e5d3be9ca9e8c26b6dd69e7548f0cbf16" + integrity sha512-C/BaXcnnvBCmHTpz/VGZ8jgtE2aYlW4hxDhseJAWZb7gqGM/qtCK6iZUb0TyKFf7BOUsBH7Q7fkRsDRhg1XklQ== + dependencies: + "@babel/helper-validator-identifier" "^7.22.5" + chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@^7.12.7", "@babel/parser@^7.18.13", "@babel/parser@^7.18.8", "@babel/parser@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.20.7.tgz#66fe23b3c8569220817d5feb8b9dcdc95bb4f71b" - integrity sha512-T3Z9oHybU+0vZlY9CiDSJQTD5ZapcW18ZctFMi0MOAl/4BjFF4ul7NVSARLdbGO5vDqy9eQiGTV0LtKfvCYvcg== - 
-"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" - integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.20.7.tgz#d9c85589258539a22a901033853101a6198d4ef1" - integrity sha512-sbr9+wNE5aXMBBFBICk01tt7sBf2Oc9ikRFEcem/ZORup9IMUdNhW7/wVLEbbtlWOsEubJet46mHAL2C8+2jKQ== - dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" - "@babel/plugin-proposal-optional-chaining" "^7.20.7" - -"@babel/plugin-proposal-async-generator-functions@^7.20.1": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.20.7.tgz#bfb7276d2d573cb67ba379984a2334e262ba5326" - integrity sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-remap-async-to-generator" "^7.18.9" - "@babel/plugin-syntax-async-generators" "^7.8.4" - -"@babel/plugin-proposal-class-properties@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" - integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-class-static-block@^7.18.6": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.20.7.tgz#92592e9029b13b15be0f7ce6a7aedc2879ca45a7" - integrity sha512-AveGOoi9DAjUYYuUAG//Ig69GlazLnoyzMw68VCDux+c1tsnnH/OkYcpz/5xzMkEFC6UxjR5Gw1c+iY2wOGVeQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.20.7" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-class-static-block" "^7.14.5" +"@babel/parser@^7.12.7", "@babel/parser@^7.18.8", "@babel/parser@^7.22.11", "@babel/parser@^7.22.5": + version "7.22.14" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.14.tgz#c7de58e8de106e88efca42ce17f0033209dfd245" + integrity sha512-1KucTHgOvaw/LzCVrEOAyXkr9rQlp0A1HiHRYnSUE9dmb8PvPW7o5sscg+5169r54n3vGlbx6GevTE/Iw/P3AQ== -"@babel/plugin-proposal-dynamic-import@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" - integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" + integrity sha512-NP1M5Rf+u2Gw9qfSO4ihjcTGW5zXTi36ITLd4/EoAcEhIZ0yjMqmftDNl3QC19CX7olhrjpyU454g/2W7X0jvQ== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-proposal-export-namespace-from@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" - integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.22.5.tgz#fef09f9499b1f1c930da8a0c419db42167d792ca" + integrity sha512-31Bb65aZaUwqCbWMnZPduIZxCBngHFlzyN6Dq6KAJjtx+lx6ohKHubc61OomYi7XwVD4Ol0XCVz4h+pYFR048g== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-proposal-json-strings@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" - integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-json-strings" "^7.8.3" - -"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.20.7.tgz#dfbcaa8f7b4d37b51e8bfb46d94a5aea2bb89d83" - integrity sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug== - dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" - integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - -"@babel/plugin-proposal-numeric-separator@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" - integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/plugin-transform-optional-chaining" "^7.22.5" "@babel/plugin-proposal-object-rest-spread@7.12.1": version "7.12.1" @@ -600,59 +519,10 @@ 
"@babel/plugin-syntax-object-rest-spread" "^7.8.0" "@babel/plugin-transform-parameters" "^7.12.1" -"@babel/plugin-proposal-object-rest-spread@^7.20.2": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz#aa662940ef425779c75534a5c41e9d936edc390a" - integrity sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg== - dependencies: - "@babel/compat-data" "^7.20.5" - "@babel/helper-compilation-targets" "^7.20.7" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.20.7" - -"@babel/plugin-proposal-optional-catch-binding@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" - integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - -"@babel/plugin-proposal-optional-chaining@^7.18.9", "@babel/plugin-proposal-optional-chaining@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.20.7.tgz#49f2b372519ab31728cc14115bb0998b15bfda55" - integrity sha512-T+A7b1kfjtRM51ssoOfS1+wbyCVqorfyZhT99TvxxLMirPShD8CzKMRepMlCBGM5RpHMbn8s+5MMHnPstJH6mQ== - dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - -"@babel/plugin-proposal-private-methods@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" - integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-private-property-in-object@^7.18.6": - version "7.20.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.20.5.tgz#309c7668f2263f1c711aa399b5a9a6291eef6135" - integrity sha512-Vq7b9dUA12ByzB4EjQTPo25sFhY+08pQDBSZRtUAkj7lb7jahaHR5igera16QZ+3my1nYR4dKsNdYj5IjPHilQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-create-class-features-plugin" "^7.20.5" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - -"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" - integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" +"@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": + version "7.21.0-placeholder-for-preset-env.2" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703" + integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w== "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" @@ -689,12 +559,26 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.3" -"@babel/plugin-syntax-import-assertions@^7.20.0": - version "7.20.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.20.0.tgz#bb50e0d4bea0957235390641209394e87bdb9cc4" - integrity sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ== +"@babel/plugin-syntax-import-assertions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" + integrity sha512-rdV97N7KqsRzeNGoWUOK6yUsWarLjE5Su/Snk9IYPU9CwkWHs4t+rTGOvffTR8XGkJMTAdLfO0xVnXm8wugIJg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-syntax-import-attributes@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.22.5.tgz#ab840248d834410b829f569f5262b9e517555ecb" + integrity sha512-KwvoWDeNKPETmozyFE0P2rOLqh39EoQHNjqizrI5B8Vt0ZNS7M56s7dAiAqbYfiAYOuIzIh96z3iR2ktgu3tEg== dependencies: - "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-syntax-import-meta@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings@^7.8.3": version "7.8.3" @@ -710,12 +594,12 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-jsx@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" - integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== +"@babel/plugin-syntax-jsx@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" + integrity sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-logical-assignment-operators@^7.10.4": version "7.10.4" @@ -773,357 +657,491 @@ dependencies: "@babel/helper-plugin-utils" "^7.14.5" -"@babel/plugin-syntax-typescript@^7.20.0": - version "7.20.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.20.0.tgz#4e9a0cfc769c85689b77a2e642d24e9f697fc8c7" - integrity sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ== +"@babel/plugin-syntax-typescript@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" + integrity sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ== dependencies: - "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-arrow-functions@^7.18.6": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.20.7.tgz#bea332b0e8b2dab3dafe55a163d8227531ab0551" - integrity sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ== +"@babel/plugin-syntax-unicode-sets-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357" + integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-transform-async-to-generator@^7.18.6": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.20.7.tgz#dfee18623c8cb31deb796aa3ca84dda9cea94354" - integrity sha512-Uo5gwHPT9vgnSXQxqGtpdufUiWp96gk7yiP4Mp5bm1QMkEmLXBO7PAGYbKoJ6DhAwiNkcHFBol/x5zZZkL/t0Q== +"@babel/plugin-transform-arrow-functions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.22.5.tgz#e5ba566d0c58a5b2ba2a8b795450641950b71958" + integrity sha512-26lTNXoVRdAnsaDXPpvCNUq+OVWEVC6bx7Vvz9rC53F2bagUWW4u4ii2+h8Fejfh7RYqPxn+libeFBBck9muEw== dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoped-functions@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" - integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== +"@babel/plugin-transform-async-generator-functions@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.22.11.tgz#dbe3b1ff5a52e2e5edc4b19a60d325a675ed2649" + integrity sha512-0pAlmeRJn6wU84zzZsEOx1JV1Jf8fqO9ok7wofIJwUnplYo247dcd24P+cMJht7ts9xkzdtB0EPHmOb7F+KzXw== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-remap-async-to-generator" "^7.22.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-transform-async-to-generator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.22.5.tgz#c7a85f44e46f8952f6d27fe57c2ed3cc084c3775" + integrity sha512-b1A8D8ZzE/VhNDoV1MSJTnpKkCG5bJo+19R4o4oy03zM7ws8yEMK755j61Dc3EyvdysbqH5BOOTquJ7ZX9C6vQ== + dependencies: + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + 
"@babel/helper-remap-async-to-generator" "^7.22.5" + +"@babel/plugin-transform-block-scoped-functions@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.22.5.tgz#27978075bfaeb9fa586d3cb63a3d30c1de580024" + integrity sha512-tdXZ2UdknEKQWKJP1KMNmuF5Lx3MymtMN/pvA+p/VEkhK8jVcQ1fzSy8KM9qRYhAf2/lV33hoMPKI/xaI9sADA== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-block-scoping@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.22.10.tgz#88a1dccc3383899eb5e660534a76a22ecee64faa" + integrity sha512-1+kVpGAOOI1Albt6Vse7c8pHzcZQdQKW+wJH+g8mCaszOdDVwRXa/slHPqIw+oJAJANTKDMuM2cBdV0Dg618Vg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoping@^7.20.2": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.20.11.tgz#9f5a3424bd112a3f32fe0cf9364fbb155cff262a" - integrity sha512-tA4N427a7fjf1P0/2I4ScsHGc5jcHPbb30xMbaTke2gxDuWpUfXDuX1FEymJwKk4tuGUvGcejAR6HdZVqmmPyw== - dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - -"@babel/plugin-transform-classes@^7.20.2": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.20.7.tgz#f438216f094f6bb31dc266ebfab8ff05aecad073" - integrity sha512-LWYbsiXTPKl+oBlXUGlwNlJZetXD5Am+CyBdqhPsDVjM9Jc8jwBJFrKhHf900Kfk2eZG1y9MAG3UNajol7A4VQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-compilation-targets" "^7.20.7" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-replace-supers" "^7.20.7" - "@babel/helper-split-export-declaration" "^7.18.6" +"@babel/plugin-transform-class-properties@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.22.5.tgz#97a56e31ad8c9dc06a0b3710ce7803d5a48cca77" + integrity sha512-nDkQ0NfkOhPTq8YCLiWNxp1+f9fCobEjCb0n8WdbNUBc4IB5V7P1QnX9IjpSoquKrXF5SKojHleVNs2vGeHCHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-class-static-block@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.22.11.tgz#dc8cc6e498f55692ac6b4b89e56d87cec766c974" + integrity sha512-GMM8gGmqI7guS/llMFk1bJDkKfn3v3C4KHK9Yg1ey5qcHcOlKb0QvcMrgzvxo+T03/4szNh5lghY+fEC98Kq9g== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.22.11" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-transform-classes@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.22.6.tgz#e04d7d804ed5b8501311293d1a0e6d43e94c3363" + integrity sha512-58EgM6nuPNG6Py4Z3zSuu0xWu2VfodiMi72Jt5Kj2FECmaYk1RrTXA45z6KBFsu9tRgwQDwIiY4FXTt+YsSFAQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-compilation-targets" "^7.22.6" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + 
"@babel/helper-optimise-call-expression" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" globals "^11.1.0" -"@babel/plugin-transform-computed-properties@^7.18.9": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.20.7.tgz#704cc2fd155d1c996551db8276d55b9d46e4d0aa" - integrity sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ== +"@babel/plugin-transform-computed-properties@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.22.5.tgz#cd1e994bf9f316bd1c2dafcd02063ec261bb3869" + integrity sha512-4GHWBgRf0krxPX+AaPtgBAlTgTeZmqDynokHOX7aqqAB4tHs3U2Y02zH6ETFdLZGcg9UQSD1WCmkVrE9ErHeOg== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/template" "^7.20.7" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/template" "^7.22.5" -"@babel/plugin-transform-destructuring@^7.20.2": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.20.7.tgz#8bda578f71620c7de7c93af590154ba331415454" - integrity sha512-Xwg403sRrZb81IVB79ZPqNQME23yhugYVqgTxAhT99h485F4f+GMELFhhOsscDUB7HCswepKeCKLn/GZvUKoBA== +"@babel/plugin-transform-destructuring@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.22.10.tgz#38e2273814a58c810b6c34ea293be4973c4eb5e2" + integrity sha512-dPJrL0VOyxqLM9sritNbMSGx/teueHF/htMKrPT7DNxccXxRDPYqlgPFFdr8u+F+qUZOkZoXue/6rL5O5GduEw== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" - integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== +"@babel/plugin-transform-dotall-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.22.5.tgz#dbb4f0e45766eb544e193fb00e65a1dd3b2a4165" + integrity sha512-5/Yk9QxCQCl+sOIB1WelKnVRxTJDSAIxtJLL2/pqL14ZVlbH0fUQUZa/T5/UnQtBNgghR7mfB8ERBKyKPCi7Vw== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-duplicate-keys@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" - integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== +"@babel/plugin-transform-duplicate-keys@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.22.5.tgz#b6e6428d9416f5f0bba19c70d1e6e7e0b88ab285" + integrity sha512-dEnYD+9BBgld5VBXHnF/DbYGp3fqGMsyxKbtD1mDyIA7AkTSpKXFhCVuj/oQVOoALfBs77DudA0BE4d5mcpmqw== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" + 
"@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-exponentiation-operator@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" - integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== +"@babel/plugin-transform-dynamic-import@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.22.11.tgz#2c7722d2a5c01839eaf31518c6ff96d408e447aa" + integrity sha512-g/21plo58sfteWjaO0ZNVb+uEOkJNjAaHhbejrnBmu011l/eNDScmkbjCC3l4FKb10ViaGU4aOkFznSu2zRHgA== dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" -"@babel/plugin-transform-for-of@^7.18.8": - version "7.18.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" - integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== +"@babel/plugin-transform-exponentiation-operator@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.22.5.tgz#402432ad544a1f9a480da865fda26be653e48f6a" + integrity sha512-vIpJFNM/FjZ4rh1myqIya9jXwrwwgFRHPjT3DkUA9ZLHuzox8jiXkOLvwm1H+PQIP3CqfC++WPKeuDi0Sjdj1g== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-function-name@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" - integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== +"@babel/plugin-transform-export-namespace-from@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.22.11.tgz#b3c84c8f19880b6c7440108f8929caf6056db26c" + integrity sha512-xa7aad7q7OiT8oNZ1mU7NrISjlSkVdMbNxn9IuLZyL9AJEhs1Apba3I+u5riX1dIkdptP5EKDG5XDPByWxtehw== dependencies: - "@babel/helper-compilation-targets" "^7.18.9" - "@babel/helper-function-name" "^7.18.9" - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" -"@babel/plugin-transform-literals@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" - integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== +"@babel/plugin-transform-for-of@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.22.5.tgz#ab1b8a200a8f990137aff9a084f8de4099ab173f" + integrity sha512-3kxQjX1dU9uudwSshyLeEipvrLjBCVthCgeTp6CzE/9JYrlAIaeekVxRpCWsDDfYTfRZRoCeZatCQvwo+wvK8A== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" 
-"@babel/plugin-transform-member-expression-literals@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" - integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== +"@babel/plugin-transform-function-name@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.22.5.tgz#935189af68b01898e0d6d99658db6b164205c143" + integrity sha512-UIzQNMS0p0HHiQm3oelztj+ECwFnj+ZRV4KnguvlsD2of1whUeM6o7wGNj6oLwcDoAXQ8gEqfgC24D+VdIcevg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-compilation-targets" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-amd@^7.19.6": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.20.11.tgz#3daccca8e4cc309f03c3a0c4b41dc4b26f55214a" - integrity sha512-NuzCt5IIYOW0O30UvqktzHYR2ud5bOWbY0yaxWZ6G+aFzOMJvrs5YHNikrbdaT15+KNO31nPOy5Fim3ku6Zb5g== +"@babel/plugin-transform-json-strings@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.22.11.tgz#689a34e1eed1928a40954e37f74509f48af67835" + integrity sha512-CxT5tCqpA9/jXFlme9xIBCc5RPtdDq3JpkkhgHQqtDdiTnTI0jtZ0QzXhr5DILeYifDPp2wvY2ad+7+hLMW5Pw== dependencies: - "@babel/helper-module-transforms" "^7.20.11" - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-json-strings" "^7.8.3" -"@babel/plugin-transform-modules-commonjs@^7.19.6": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.20.11.tgz#8cb23010869bf7669fd4b3098598b6b2be6dc607" - integrity sha512-S8e1f7WQ7cimJQ51JkAaDrEtohVEitXjgCGAS2N8S31Y42E+kWwfSz83LYz57QdBm7q9diARVqanIaH2oVgQnw== +"@babel/plugin-transform-literals@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.22.5.tgz#e9341f4b5a167952576e23db8d435849b1dd7920" + integrity sha512-fTLj4D79M+mepcw3dgFBTIDYpbcB9Sm0bpm4ppXPaO+U+PKFFyV9MGRvS0gvGw62sd10kT5lRMKXAADb9pWy8g== dependencies: - "@babel/helper-module-transforms" "^7.20.11" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-simple-access" "^7.20.2" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-systemjs@^7.19.6": - version "7.20.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.20.11.tgz#467ec6bba6b6a50634eea61c9c232654d8a4696e" - integrity sha512-vVu5g9BPQKSFEmvt2TA4Da5N+QVS66EX21d8uoOihC+OCpUoGvzVsXeqFdtAEfVa5BILAeFt+U7yVmLbQnAJmw== +"@babel/plugin-transform-logical-assignment-operators@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.22.11.tgz#24c522a61688bde045b7d9bc3c2597a4d948fc9c" + integrity sha512-qQwRTP4+6xFCDV5k7gZBF3C31K34ut0tbEcTKxlX/0KXxm9GLcO14p570aWxFvVzx6QAfPgq7gaeIHXJC8LswQ== dependencies: - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-module-transforms" "^7.20.11" - "@babel/helper-plugin-utils" "^7.20.2" - 
"@babel/helper-validator-identifier" "^7.19.1" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" -"@babel/plugin-transform-modules-umd@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" - integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== +"@babel/plugin-transform-member-expression-literals@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.22.5.tgz#4fcc9050eded981a468347dd374539ed3e058def" + integrity sha512-RZEdkNtzzYCFl9SE9ATaUMTj2hqMb4StarOJLrZRbqqU4HSBE7UlBw9WBWQiDzrJZJdUWiMTVDI6Gv/8DPvfew== dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": - version "7.20.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.20.5.tgz#626298dd62ea51d452c3be58b285d23195ba69a8" - integrity sha512-mOW4tTzi5iTLnw+78iEq3gr8Aoq4WNRGpmSlrogqaiCBoR1HFhpU4JkpQFOHfeYx3ReVIFWOQJS4aZBRvuZ6mA== +"@babel/plugin-transform-modules-amd@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.22.5.tgz#4e045f55dcf98afd00f85691a68fc0780704f526" + integrity sha512-R+PTfLTcYEmb1+kK7FNkhQ1gP4KgjpSO6HfH9+f8/yfp2Nt3ggBjiVpRwmwTlfqZLafYKJACy36yDXlEmI9HjQ== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.20.5" - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-module-transforms" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-new-target@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" - integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== +"@babel/plugin-transform-modules-commonjs@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.22.11.tgz#d7991d3abad199c03b68ee66a64f216c47ffdfae" + integrity sha512-o2+bg7GDS60cJMgz9jWqRUsWkMzLCxp+jFDeDUT5sjRlAxcJWZ2ylNdI7QQ2+CH5hWu7OnN+Cv3htt7AkSf96g== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-module-transforms" "^7.22.9" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-simple-access" "^7.22.5" -"@babel/plugin-transform-object-super@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" - integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== +"@babel/plugin-transform-modules-systemjs@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.22.11.tgz#3386be5875d316493b517207e8f1931d93154bb1" + integrity sha512-rIqHmHoMEOhI3VkVf5jQ15l539KrwhzqcBO6wdCNWPWc/JWt9ILNYNUssbRpeq0qWns8svuw8LnMNCvWBIJ8wA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - 
"@babel/helper-replace-supers" "^7.18.6" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-module-transforms" "^7.22.9" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.5" -"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.20.1", "@babel/plugin-transform-parameters@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.20.7.tgz#0ee349e9d1bc96e78e3b37a7af423a4078a7083f" - integrity sha512-WiWBIkeHKVOSYPO0pWkxGPfKeWrCJyD3NJ53+Lrp/QMSZbsVPovrVl2aWZ19D/LTVnaDv5Ap7GJ/B2CTOZdrfA== +"@babel/plugin-transform-modules-umd@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.22.5.tgz#4694ae40a87b1745e3775b6a7fe96400315d4f98" + integrity sha512-+S6kzefN/E1vkSsKx8kmQuqeQsvCKCd1fraCM7zXm4SFoggI099Tr4G8U81+5gtMdUeMQ4ipdQffbKLX0/7dBQ== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-module-transforms" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-property-literals@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" - integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== +"@babel/plugin-transform-named-capturing-groups-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz#67fe18ee8ce02d57c855185e27e3dc959b2e991f" + integrity sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-new-target@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.22.5.tgz#1b248acea54ce44ea06dfd37247ba089fcf9758d" + integrity sha512-AsF7K0Fx/cNKVyk3a+DW0JLo+Ua598/NxMRvxDnkpCIGFh43+h/v2xyhRUYf6oD8gE4QtL83C7zZVghMjHd+iw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-nullish-coalescing-operator@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.22.11.tgz#debef6c8ba795f5ac67cd861a81b744c5d38d9fc" + integrity sha512-YZWOw4HxXrotb5xsjMJUDlLgcDXSfO9eCmdl1bgW4+/lAGdkjaEvOnQ4p5WKKdUgSzO39dgPl0pTnfxm0OAXcg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-transform-numeric-separator@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.22.11.tgz#498d77dc45a6c6db74bb829c02a01c1d719cbfbd" + integrity sha512-3dzU4QGPsILdJbASKhF/V2TVP+gJya1PsueQCxIPCEcerqF21oEcrob4mzjsp2Py/1nLfF5m+xYNMDpmA8vffg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-transform-object-rest-spread@^7.22.11": + version "7.22.11" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.22.11.tgz#dbbb06ce783cd994a8f430d8cefa553e9b42ca62" + integrity sha512-nX8cPFa6+UmbepISvlf5jhQyaC7ASs/7UxHmMkuJ/k5xSHvDPPaibMo+v3TXwU/Pjqhep/nFNpd3zn4YR59pnw== + dependencies: + "@babel/compat-data" "^7.22.9" + "@babel/helper-compilation-targets" "^7.22.10" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.22.5" + +"@babel/plugin-transform-object-super@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.22.5.tgz#794a8d2fcb5d0835af722173c1a9d704f44e218c" + integrity sha512-klXqyaT9trSjIUrcsYIfETAzmOEZL3cBYqOYLJxBHfMFFggmXOv+NYSX/Jbs9mzMVESw/WycLFPRx8ba/b2Ipw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.5" + +"@babel/plugin-transform-optional-catch-binding@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.22.11.tgz#461cc4f578a127bb055527b3e77404cad38c08e0" + integrity sha512-rli0WxesXUeCJnMYhzAglEjLWVDF6ahb45HuprcmQuLidBJFWjNnOzssk2kuc6e33FlLaiZhG/kUIzUMWdBKaQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-transform-optional-chaining@^7.22.12", "@babel/plugin-transform-optional-chaining@^7.22.5": + version "7.22.12" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.22.12.tgz#d7ebf6a88cd2f4d307b0e000ab630acd8124b333" + integrity sha512-7XXCVqZtyFWqjDsYDY4T45w4mlx1rf7aOgkc/Ww76xkgBiOlmjPkx36PBLHa1k1rwWvVgYMPsbuVnIamx2ZQJw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.22.5.tgz#c3542dd3c39b42c8069936e48717a8d179d63a18" + integrity sha512-AVkFUBurORBREOmHRKo06FjHYgjrabpdqRSwq6+C7R5iTCZOsM4QbcB27St0a4U6fffyAOqh3s/qEfybAhfivg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-private-methods@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.22.5.tgz#21c8af791f76674420a147ae62e9935d790f8722" + integrity sha512-PPjh4gyrQnGe97JTalgRGMuU4icsZFnWkzicB/fUtzlKUqvsWBKEpPPfr5a2JiyirZkHxnAqkQMO5Z5B2kK3fA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-private-property-in-object@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.22.11.tgz#ad45c4fc440e9cb84c718ed0906d96cf40f9a4e1" + integrity sha512-sSCbqZDBKHetvjSwpyWzhuHkmW5RummxJBVbYLkGkaiTOWGxml7SXt0iWa03bzxFIx7wOj3g/ILRd0RcJKBeSQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-create-class-features-plugin" "^7.22.11" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + 
+"@babel/plugin-transform-property-literals@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.22.5.tgz#b5ddabd73a4f7f26cd0e20f5db48290b88732766" + integrity sha512-TiOArgddK3mK/x1Qwf5hay2pxI6wCZnvQqrFSqbtg1GLl2JcNMitVH/YnqjP+M31pLUeTfzY1HAXFDnUBV30rQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-constant-elements@^7.18.12": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.20.2.tgz#3f02c784e0b711970d7d8ccc96c4359d64e27ac7" - integrity sha512-KS/G8YI8uwMGKErLFOHS/ekhqdHhpEloxs43NecQHVgo2QuQSyJhGIY1fL8UGl9wy5ItVwwoUL4YxVqsplGq2g== + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.22.5.tgz#6dfa7c1c37f7d7279e417ceddf5a04abb8bb9c29" + integrity sha512-BF5SXoO+nX3h5OhlN78XbbDrBOffv+AxPP2ENaJOVqjWCgBDeOY3WcaUcddutGSfoap+5NEQ/q/4I3WZIvgkXA== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-react-display-name@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" - integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== +"@babel/plugin-transform-react-display-name@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.22.5.tgz#3c4326f9fce31c7968d6cb9debcaf32d9e279a2b" + integrity sha512-PVk3WPYudRF5z4GKMEYUrLjPl38fJSKNaEOkFuoprioowGuWN6w2RKznuFNSlJx7pzzXXStPUnNSOEO0jL5EVw== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-react-jsx-development@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5" - integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== +"@babel/plugin-transform-react-jsx-development@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz#e716b6edbef972a92165cd69d92f1255f7e73e87" + integrity sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A== dependencies: - "@babel/plugin-transform-react-jsx" "^7.18.6" + "@babel/plugin-transform-react-jsx" "^7.22.5" -"@babel/plugin-transform-react-jsx@^7.18.6": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.20.7.tgz#025d85a1935fd7e19dfdcb1b1d4df34d4da484f7" - integrity sha512-Tfq7qqD+tRj3EoDhY00nn2uP2hsRxgYGi5mLQ5TimKav0a9Lrpd4deE+fcLXU8zFYRjlKPHZhpCvfEA6qnBxqQ== +"@babel/plugin-transform-react-jsx@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.5.tgz#932c291eb6dd1153359e2a90cb5e557dcf068416" + integrity sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA== dependencies: - "@babel/helper-annotate-as-pure" 
"^7.18.6" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-jsx" "^7.18.6" - "@babel/types" "^7.20.7" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-jsx" "^7.22.5" + "@babel/types" "^7.22.5" -"@babel/plugin-transform-react-pure-annotations@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844" - integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== +"@babel/plugin-transform-react-pure-annotations@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.22.5.tgz#1f58363eef6626d6fa517b95ac66fe94685e32c0" + integrity sha512-gP4k85wx09q+brArVinTXhWiyzLl9UpmGva0+mWyKxk6JZequ05x3eUcIUE+FyttPKJFRRVtAvQaJ6YF9h1ZpA== dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-regenerator@^7.18.6": - version "7.20.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.20.5.tgz#57cda588c7ffb7f4f8483cc83bdcea02a907f04d" - integrity sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ== +"@babel/plugin-transform-regenerator@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.22.10.tgz#8ceef3bd7375c4db7652878b0241b2be5d0c3cca" + integrity sha512-F28b1mDt8KcT5bUyJc/U9nwzw6cV+UmTeRlXYIl2TNqMMJif0Jeey9/RQ3C4NOd2zp0/TRsDns9ttj2L523rsw== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - regenerator-transform "^0.15.1" + "@babel/helper-plugin-utils" "^7.22.5" + regenerator-transform "^0.15.2" -"@babel/plugin-transform-reserved-words@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" - integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== +"@babel/plugin-transform-reserved-words@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.22.5.tgz#832cd35b81c287c4bcd09ce03e22199641f964fb" + integrity sha512-DTtGKFRQUDm8svigJzZHzb/2xatPc6TzNvAIJ5GqOKDsGFYgAskjRulbR/vGsPKq3OPqtexnz327qYpP57RFyA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-runtime@^7.18.6": - version "7.18.10" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.18.10.tgz#37d14d1fa810a368fd635d4d1476c0154144a96f" - integrity sha512-q5mMeYAdfEbpBAgzl7tBre/la3LeCxmDO1+wMXRdPWbcoMjR3GiXlCLk7JBZVVye0bqTGNMbt0yYVXX1B1jEWQ== - dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.9" - babel-plugin-polyfill-corejs2 "^0.3.2" - babel-plugin-polyfill-corejs3 "^0.5.3" - babel-plugin-polyfill-regenerator "^0.4.0" - semver "^6.3.0" + version "7.22.10" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.22.10.tgz#89eda6daf1d3af6f36fb368766553054c8d7cd46" + integrity sha512-RchI7HePu1eu0CYNKHHHQdfenZcM4nz8rew5B1VWqeRKdcwW5aQ5HeG9eTUbWiAS1UrmHVLmoxTWHt3iLD/NhA== + dependencies: + "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + babel-plugin-polyfill-corejs2 "^0.4.5" + babel-plugin-polyfill-corejs3 "^0.8.3" + babel-plugin-polyfill-regenerator "^0.5.2" + semver "^6.3.1" -"@babel/plugin-transform-shorthand-properties@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" - integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== +"@babel/plugin-transform-shorthand-properties@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.22.5.tgz#6e277654be82b5559fc4b9f58088507c24f0c624" + integrity sha512-vM4fq9IXHscXVKzDv5itkO1X52SmdFBFcMIBZ2FRn2nqVYqw6dBexUgMvAjHW+KXpPPViD/Yo3GrDEBaRC0QYA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-spread@^7.19.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.20.7.tgz#c2d83e0b99d3bf83e07b11995ee24bf7ca09401e" - integrity sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw== +"@babel/plugin-transform-spread@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.22.5.tgz#6487fd29f229c95e284ba6c98d65eafb893fea6b" + integrity sha512-5ZzDQIGyvN4w8+dMmpohL6MBo+l2G7tfC/O2Dg7/hjpgeWvUx8FzfeOKxGog9IimPa4YekaQ9PlDqTLOljkcxg== dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" -"@babel/plugin-transform-sticky-regex@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" - integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== +"@babel/plugin-transform-sticky-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.22.5.tgz#295aba1595bfc8197abd02eae5fc288c0deb26aa" + integrity sha512-zf7LuNpHG0iEeiyCNwX4j3gDg1jgt1k3ZdXBKbZSoA3BbGQGvMiSvfbZRR3Dr3aeJe3ooWFZxOOG3IRStYp2Bw== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-template-literals@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" - integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== +"@babel/plugin-transform-template-literals@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.22.5.tgz#8f38cf291e5f7a8e60e9f733193f0bcc10909bff" + integrity sha512-5ciOehRNf+EyUeewo8NkbQiUs4d6ZxiHo6BcBcnFlgiJfu16q0bQUw9Jvo0b0gBKFG1SMhDSjeKXSYuJLeFSMA== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-typeof-symbol@^7.18.9": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" - integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== +"@babel/plugin-transform-typeof-symbol@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.22.5.tgz#5e2ba478da4b603af8673ff7c54f75a97b716b34" + integrity sha512-bYkI5lMzL4kPii4HHEEChkD0rkc+nvnlR6+o/qdqR6zrm0Sv/nodmyLhlq2DO0YKLUNd2VePmPRjJXSBh9OIdA== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-typescript@^7.18.6": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.20.7.tgz#673f49499cd810ae32a1ea5f3f8fab370987e055" - integrity sha512-m3wVKEvf6SoszD8pu4NZz3PvfKRCMgk6D6d0Qi9hNnlM5M6CFS92EgF4EiHVLKbU0r/r7ty1hg7NPZwE7WRbYw== +"@babel/plugin-transform-typescript@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.22.11.tgz#9f27fb5e51585729374bb767ab6a6d9005a23329" + integrity sha512-0E4/L+7gfvHub7wsbTv03oRtD69X31LByy44fGmFzbZScpupFByMcgCJ0VbBTkzyjSJKuRoGN8tcijOWKTmqOA== dependencies: - "@babel/helper-create-class-features-plugin" "^7.20.7" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-typescript" "^7.20.0" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-create-class-features-plugin" "^7.22.11" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-typescript" "^7.22.5" -"@babel/plugin-transform-unicode-escapes@^7.18.10": - version "7.18.10" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" - integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== +"@babel/plugin-transform-unicode-escapes@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.22.10.tgz#c723f380f40a2b2f57a62df24c9005834c8616d9" + integrity sha512-lRfaRKGZCBqDlRU3UIFovdp9c9mEvlylmpod0/OatICsSfuQ9YFthRo1tpTkGsklEefZdqlEFdY4A2dwTb6ohg== dependencies: - "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-unicode-regex@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" - integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== +"@babel/plugin-transform-unicode-property-regex@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.22.5.tgz#098898f74d5c1e86660dc112057b2d11227f1c81" + integrity sha512-HCCIb+CbJIAE6sXn5CjFQXMwkCClcOfPCzTlilJ8cUatfzwHlWQkbtV0zD338u9dZskwvuOYTuuaMaA8J5EI5A== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-unicode-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.22.5.tgz#ce7e7bb3ef208c4ff67e02a22816656256d7a183" + integrity sha512-028laaOKptN5vHJf9/Arr/HiJekMd41hOEZYvNsrsXqJ7YPYuX2bQxh31fkZzGmq3YqHRJzYFFAVYvKfMPKqyg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-unicode-sets-regex@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.22.5.tgz#77788060e511b708ffc7d42fdfbc5b37c3004e91" + integrity sha512-lhMfi4FC15j13eKrh3DnYHjpGj6UKQHtNKTbtc1igvAhRy4+kLhV07OpLcsN0VgDEw/MjAvJO4BdMJsHwMhzCg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/preset-env@^7.18.6", "@babel/preset-env@^7.19.4": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.20.2.tgz#9b1642aa47bb9f43a86f9630011780dab7f86506" - integrity sha512-1G0efQEWR1EHkKvKHqbG+IN/QdgwfByUpM5V5QroDzGV2t3S/WXNQd693cHiHTlCFMpr9B6FkPFXDA2lQcKoDg== - dependencies: - "@babel/compat-data" "^7.20.1" - "@babel/helper-compilation-targets" "^7.20.0" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" - "@babel/plugin-proposal-async-generator-functions" "^7.20.1" - "@babel/plugin-proposal-class-properties" "^7.18.6" - "@babel/plugin-proposal-class-static-block" "^7.18.6" - "@babel/plugin-proposal-dynamic-import" "^7.18.6" - "@babel/plugin-proposal-export-namespace-from" "^7.18.9" - "@babel/plugin-proposal-json-strings" "^7.18.6" - "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" - "@babel/plugin-proposal-numeric-separator" "^7.18.6" - "@babel/plugin-proposal-object-rest-spread" "^7.20.2" - "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" - "@babel/plugin-proposal-optional-chaining" "^7.18.9" - "@babel/plugin-proposal-private-methods" "^7.18.6" - "@babel/plugin-proposal-private-property-in-object" "^7.18.6" - "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + version "7.22.14" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.22.14.tgz#1cbb468d899f64fa71c53446f13b7ff8c0005cc1" + integrity sha512-daodMIoVo+ol/g+//c/AH+szBkFj4STQUikvBijRGL72Ph+w+AMTSh55DUETe8KJlPlDT1k/mp7NBfOuiWmoig== + dependencies: + "@babel/compat-data" "^7.22.9" + "@babel/helper-compilation-targets" "^7.22.10" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-option" "^7.22.5" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.22.5" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" 
"^7.22.5" + "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2" "@babel/plugin-syntax-async-generators" "^7.8.4" "@babel/plugin-syntax-class-properties" "^7.12.13" "@babel/plugin-syntax-class-static-block" "^7.14.5" "@babel/plugin-syntax-dynamic-import" "^7.8.3" "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-import-assertions" "^7.20.0" + "@babel/plugin-syntax-import-assertions" "^7.22.5" + "@babel/plugin-syntax-import-attributes" "^7.22.5" + "@babel/plugin-syntax-import-meta" "^7.10.4" "@babel/plugin-syntax-json-strings" "^7.8.3" "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" @@ -1133,139 +1151,147 @@ "@babel/plugin-syntax-optional-chaining" "^7.8.3" "@babel/plugin-syntax-private-property-in-object" "^7.14.5" "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-transform-arrow-functions" "^7.18.6" - "@babel/plugin-transform-async-to-generator" "^7.18.6" - "@babel/plugin-transform-block-scoped-functions" "^7.18.6" - "@babel/plugin-transform-block-scoping" "^7.20.2" - "@babel/plugin-transform-classes" "^7.20.2" - "@babel/plugin-transform-computed-properties" "^7.18.9" - "@babel/plugin-transform-destructuring" "^7.20.2" - "@babel/plugin-transform-dotall-regex" "^7.18.6" - "@babel/plugin-transform-duplicate-keys" "^7.18.9" - "@babel/plugin-transform-exponentiation-operator" "^7.18.6" - "@babel/plugin-transform-for-of" "^7.18.8" - "@babel/plugin-transform-function-name" "^7.18.9" - "@babel/plugin-transform-literals" "^7.18.9" - "@babel/plugin-transform-member-expression-literals" "^7.18.6" - "@babel/plugin-transform-modules-amd" "^7.19.6" - "@babel/plugin-transform-modules-commonjs" "^7.19.6" - "@babel/plugin-transform-modules-systemjs" "^7.19.6" - "@babel/plugin-transform-modules-umd" "^7.18.6" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" - "@babel/plugin-transform-new-target" "^7.18.6" - "@babel/plugin-transform-object-super" "^7.18.6" - "@babel/plugin-transform-parameters" "^7.20.1" - "@babel/plugin-transform-property-literals" "^7.18.6" - "@babel/plugin-transform-regenerator" "^7.18.6" - "@babel/plugin-transform-reserved-words" "^7.18.6" - "@babel/plugin-transform-shorthand-properties" "^7.18.6" - "@babel/plugin-transform-spread" "^7.19.0" - "@babel/plugin-transform-sticky-regex" "^7.18.6" - "@babel/plugin-transform-template-literals" "^7.18.9" - "@babel/plugin-transform-typeof-symbol" "^7.18.9" - "@babel/plugin-transform-unicode-escapes" "^7.18.10" - "@babel/plugin-transform-unicode-regex" "^7.18.6" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.20.2" - babel-plugin-polyfill-corejs2 "^0.3.3" - babel-plugin-polyfill-corejs3 "^0.6.0" - babel-plugin-polyfill-regenerator "^0.4.1" - core-js-compat "^3.25.1" - semver "^6.3.0" - -"@babel/preset-modules@^0.1.5": - version "0.1.5" - resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" - integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" + "@babel/plugin-transform-arrow-functions" "^7.22.5" + "@babel/plugin-transform-async-generator-functions" "^7.22.11" + "@babel/plugin-transform-async-to-generator" "^7.22.5" + "@babel/plugin-transform-block-scoped-functions" "^7.22.5" + "@babel/plugin-transform-block-scoping" "^7.22.10" + "@babel/plugin-transform-class-properties" "^7.22.5" + 
"@babel/plugin-transform-class-static-block" "^7.22.11" + "@babel/plugin-transform-classes" "^7.22.6" + "@babel/plugin-transform-computed-properties" "^7.22.5" + "@babel/plugin-transform-destructuring" "^7.22.10" + "@babel/plugin-transform-dotall-regex" "^7.22.5" + "@babel/plugin-transform-duplicate-keys" "^7.22.5" + "@babel/plugin-transform-dynamic-import" "^7.22.11" + "@babel/plugin-transform-exponentiation-operator" "^7.22.5" + "@babel/plugin-transform-export-namespace-from" "^7.22.11" + "@babel/plugin-transform-for-of" "^7.22.5" + "@babel/plugin-transform-function-name" "^7.22.5" + "@babel/plugin-transform-json-strings" "^7.22.11" + "@babel/plugin-transform-literals" "^7.22.5" + "@babel/plugin-transform-logical-assignment-operators" "^7.22.11" + "@babel/plugin-transform-member-expression-literals" "^7.22.5" + "@babel/plugin-transform-modules-amd" "^7.22.5" + "@babel/plugin-transform-modules-commonjs" "^7.22.11" + "@babel/plugin-transform-modules-systemjs" "^7.22.11" + "@babel/plugin-transform-modules-umd" "^7.22.5" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.22.5" + "@babel/plugin-transform-new-target" "^7.22.5" + "@babel/plugin-transform-nullish-coalescing-operator" "^7.22.11" + "@babel/plugin-transform-numeric-separator" "^7.22.11" + "@babel/plugin-transform-object-rest-spread" "^7.22.11" + "@babel/plugin-transform-object-super" "^7.22.5" + "@babel/plugin-transform-optional-catch-binding" "^7.22.11" + "@babel/plugin-transform-optional-chaining" "^7.22.12" + "@babel/plugin-transform-parameters" "^7.22.5" + "@babel/plugin-transform-private-methods" "^7.22.5" + "@babel/plugin-transform-private-property-in-object" "^7.22.11" + "@babel/plugin-transform-property-literals" "^7.22.5" + "@babel/plugin-transform-regenerator" "^7.22.10" + "@babel/plugin-transform-reserved-words" "^7.22.5" + "@babel/plugin-transform-shorthand-properties" "^7.22.5" + "@babel/plugin-transform-spread" "^7.22.5" + "@babel/plugin-transform-sticky-regex" "^7.22.5" + "@babel/plugin-transform-template-literals" "^7.22.5" + "@babel/plugin-transform-typeof-symbol" "^7.22.5" + "@babel/plugin-transform-unicode-escapes" "^7.22.10" + "@babel/plugin-transform-unicode-property-regex" "^7.22.5" + "@babel/plugin-transform-unicode-regex" "^7.22.5" + "@babel/plugin-transform-unicode-sets-regex" "^7.22.5" + "@babel/preset-modules" "0.1.6-no-external-plugins" + "@babel/types" "^7.22.11" + babel-plugin-polyfill-corejs2 "^0.4.5" + babel-plugin-polyfill-corejs3 "^0.8.3" + babel-plugin-polyfill-regenerator "^0.5.2" + core-js-compat "^3.31.0" + semver "^6.3.1" + +"@babel/preset-modules@0.1.6-no-external-plugins": + version "0.1.6-no-external-plugins" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz#ccb88a2c49c817236861fee7826080573b8a923a" + integrity sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" "@babel/types" "^7.4.4" esutils "^2.0.2" "@babel/preset-react@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d" - integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.22.5.tgz#c4d6058fbf80bccad02dd8c313a9aaa67e3c3dd6" + integrity sha512-M+Is3WikOpEJHgR385HbuCITPTaPRaNkibTEa9oiofmJvIsrceb4yp9RL9Kb+TE8LznmeyZqpP+Lopwcx59xPQ== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-transform-react-display-name" "^7.18.6" - "@babel/plugin-transform-react-jsx" "^7.18.6" - "@babel/plugin-transform-react-jsx-development" "^7.18.6" - "@babel/plugin-transform-react-pure-annotations" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-option" "^7.22.5" + "@babel/plugin-transform-react-display-name" "^7.22.5" + "@babel/plugin-transform-react-jsx" "^7.22.5" + "@babel/plugin-transform-react-jsx-development" "^7.22.5" + "@babel/plugin-transform-react-pure-annotations" "^7.22.5" "@babel/preset-typescript@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz#ce64be3e63eddc44240c6358daefac17b3186399" - integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.22.11.tgz#f218cd0345524ac888aa3dc32f029de5b064b575" + integrity sha512-tWY5wyCZYBGY7IlalfKI1rLiGlIfnwsRHZqlky0HVv8qviwQ1Uo/05M6+s+TcTCVa6Bmoo2uJW5TMFX6Wa4qVg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-transform-typescript" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-option" "^7.22.5" + "@babel/plugin-syntax-jsx" "^7.22.5" + "@babel/plugin-transform-modules-commonjs" "^7.22.11" + "@babel/plugin-transform-typescript" "^7.22.11" + +"@babel/regjsgen@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" + integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== "@babel/runtime-corejs3@^7.18.6": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.18.9.tgz#7bacecd1cb2dd694eacd32a91fcf7021c20770ae" - integrity sha512-qZEWeccZCrHA2Au4/X05QW5CMdm4VjUDCrGq5gf1ZDcM4hRqreKrtwAn7yci9zfgAS9apvnsFXiGBHBAxZdK9A== - dependencies: - core-js-pure "^3.20.2" - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.6", "@babel/runtime@^7.8.4": - version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.9.tgz#b4fcfce55db3d2e5e080d2490f608a3b9f407f4a" - integrity sha512-lkqXDcvlFT5rvEjiu6+QYO+1GXrEHRo2LOtS7E4GtX5ESIZOgepqsZBVIj6Pv+a6zqsya9VCgiK1KAK4BvJDAw== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.20.0": - version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.21.0.tgz#5b55c9d394e5fcf304909a8b00c07dc217b56673" - integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw== - dependencies: - regenerator-runtime "^0.13.11" - -"@babel/runtime@^7.13.10": - version "7.21.5" - resolved 
"https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.21.5.tgz#8492dddda9644ae3bda3b45eabe87382caee7200" - integrity sha512-8jI69toZqqcsnqGGqwGS4Qb1VwLOEp4hz+CXPywcvjs60u3B4Pom/U/7rm4W8tMOYEB+E9wgD0mW1l3r8qlI9Q== - dependencies: - regenerator-runtime "^0.13.11" - -"@babel/template@^7.12.7", "@babel/template@^7.18.10", "@babel/template@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.20.7.tgz#a15090c2839a83b02aa996c0b4994005841fd5a8" - integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/traverse@^7.12.9", "@babel/traverse@^7.18.13", "@babel/traverse@^7.18.8", "@babel/traverse@^7.20.10", "@babel/traverse@^7.20.5", "@babel/traverse@^7.20.7": - version "7.20.10" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.20.10.tgz#2bf98239597fcec12f842756f186a9dde6d09230" - integrity sha512-oSf1juCgymrSez8NI4A2sr4+uB/mFd9MXplYGPEBnfAuWmmyeVcHa6xLPiaRBcXkcb/28bgxmQLTVwFKE1yfsg== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.20.7" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.22.11.tgz#bf65b846cb4a03e1594dba9850c4632a992ddc04" + integrity sha512-NhfzUbdWbiE6fCFypbWCPu6AR8xre31EOPF7wwAIJEvGQ2avov04eymayWinCuyXmV1b0+jzoXP/HYzzUYdvwg== + dependencies: + core-js-pure "^3.30.2" + regenerator-runtime "^0.14.0" + +"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.3", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.6", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.13", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.8.4": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.11.tgz#7a9ba3bbe406ad6f9e8dd4da2ece453eb23a77a4" + integrity sha512-ee7jVNlWN09+KftVOu9n7S8gQzD/Z6hN/I8VBRXW4P1+Xe7kJGXMwu8vds4aGIMHZnNbdpSWCfZZtinytpcAvA== + dependencies: + regenerator-runtime "^0.14.0" + +"@babel/template@^7.12.7", "@babel/template@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" + integrity sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw== + dependencies: + "@babel/code-frame" "^7.22.5" + "@babel/parser" "^7.22.5" + "@babel/types" "^7.22.5" + +"@babel/traverse@^7.12.9", "@babel/traverse@^7.18.8", "@babel/traverse@^7.22.11": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.11.tgz#71ebb3af7a05ff97280b83f05f8865ac94b2027c" + integrity sha512-mzAenteTfomcB7mfPtyi+4oe5BZ6MXxWcn4CX+h4IRJ+OOGXBrWU6jDQavkQI9Vuc5P+donFabBfFCcmWka9lQ== + dependencies: + "@babel/code-frame" "^7.22.10" + "@babel/generator" "^7.22.10" + "@babel/helper-environment-visitor" "^7.22.5" + "@babel/helper-function-name" "^7.22.5" + "@babel/helper-hoist-variables" "^7.22.5" + 
"@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.22.11" + "@babel/types" "^7.22.11" debug "^4.1.0" globals "^11.1.0" -"@babel/types@^7.12.7", "@babel/types@^7.18.13", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.19.0", "@babel/types@^7.20.0", "@babel/types@^7.20.2", "@babel/types@^7.20.5", "@babel/types@^7.20.7", "@babel/types@^7.4.4": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.20.7.tgz#54ec75e252318423fc07fb644dc6a58a64c09b7f" - integrity sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg== +"@babel/types@^7.12.7", "@babel/types@^7.20.0", "@babel/types@^7.22.10", "@babel/types@^7.22.11", "@babel/types@^7.22.5", "@babel/types@^7.4.4": + version "7.22.11" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.11.tgz#0e65a6a1d4d9cbaa892b2213f6159485fe632ea2" + integrity sha512-siazHiGuZRz9aB9NpHy9GOs9xiQPKnMzgdr493iI1M67vRXpnEq8ZOOKzezC5q7zwuQ6sDhdSp4SD9ixKSqKZg== dependencies: - "@babel/helper-string-parser" "^7.19.4" - "@babel/helper-validator-identifier" "^7.19.1" + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" "@colors/colors@1.5.0": @@ -1273,107 +1299,42 @@ resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== -"@ctrl/tinycolor@^3.4.0": - version "3.4.1" - resolved "https://registry.yarnpkg.com/@ctrl/tinycolor/-/tinycolor-3.4.1.tgz#75b4c27948c81e88ccd3a8902047bcd797f38d32" - integrity sha512-ej5oVy6lykXsvieQtqZxCOaLT+xD4+QNarq78cIYISHmZXshCvROLudpQN3lfL8G0NL7plMSSK+zlyvCaIJ4Iw== +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" -"@docsearch/css@3.3.1": - version "3.3.1" - resolved "https://registry.yarnpkg.com/@docsearch/css/-/css-3.3.1.tgz#32041581bffb1a834072fd21ca66d1dd9f016098" - integrity sha512-nznHXeFHpAYjyaSNFNFpU+IJPjQA7AINM8ONjDx/Zx4O/pGAvqwgmcLNc7zR8qXRutqnzLo06yN63xFn36KFBw== +"@ctrl/tinycolor@^3.4.0", "@ctrl/tinycolor@^3.6.0": + version "3.6.1" + resolved "https://registry.yarnpkg.com/@ctrl/tinycolor/-/tinycolor-3.6.1.tgz#b6c75a56a1947cc916ea058772d666a2c8932f31" + integrity sha512-SITSV6aIXsuVNV3f3O0f2n/cgyEDWoSqtZMYiAmcsYHydcKrOz3gUxB/iXd/Qf08+IZX4KpgNbvUdMBmWz+kcA== -"@docsearch/react@^3.1.1": - version "3.3.1" - resolved "https://registry.yarnpkg.com/@docsearch/react/-/react-3.3.1.tgz#47ce4a267a9daf1b5d913b979284b4f624088003" - integrity sha512-wdeQBODPkue6yVEEg4ntt+TiGJ6iXMBUNjBQJ0s1WVoc1OdcCnks/lkQ5LEfXETYR/q9QSbCCBnMjvnSoILaag== - dependencies: - "@algolia/autocomplete-core" "1.7.2" - "@algolia/autocomplete-preset-algolia" "1.7.2" - "@docsearch/css" "3.3.1" - algoliasearch "^4.0.0" +"@discoveryjs/json-ext@0.5.7": + version "0.5.7" + resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" + integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== -"@docusaurus/core@2.2.0", "@docusaurus/core@^2.1.0": - version "2.2.0" - resolved 
"https://registry.yarnpkg.com/@docusaurus/core/-/core-2.2.0.tgz#64c9ee31502c23b93c869f8188f73afaf5fd4867" - integrity sha512-Vd6XOluKQqzG12fEs9prJgDtyn6DPok9vmUWDR2E6/nV5Fl9SVkhEQOBxwObjk3kQh7OY7vguFaLh0jqdApWsA== +"@docsearch/css@3.5.2": + version "3.5.2" + resolved "https://registry.yarnpkg.com/@docsearch/css/-/css-3.5.2.tgz#610f47b48814ca94041df969d9fcc47b91fc5aac" + integrity sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA== + +"@docsearch/react@^3.1.1": + version "3.5.2" + resolved "https://registry.yarnpkg.com/@docsearch/react/-/react-3.5.2.tgz#2e6bbee00eb67333b64906352734da6aef1232b9" + integrity sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng== dependencies: - "@babel/core" "^7.18.6" - "@babel/generator" "^7.18.7" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-transform-runtime" "^7.18.6" - "@babel/preset-env" "^7.18.6" - "@babel/preset-react" "^7.18.6" - "@babel/preset-typescript" "^7.18.6" - "@babel/runtime" "^7.18.6" - "@babel/runtime-corejs3" "^7.18.6" - "@babel/traverse" "^7.18.8" - "@docusaurus/cssnano-preset" "2.2.0" - "@docusaurus/logger" "2.2.0" - "@docusaurus/mdx-loader" "2.2.0" - "@docusaurus/react-loadable" "5.5.2" - "@docusaurus/utils" "2.2.0" - "@docusaurus/utils-common" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" - "@slorber/static-site-generator-webpack-plugin" "^4.0.7" - "@svgr/webpack" "^6.2.1" - autoprefixer "^10.4.7" - babel-loader "^8.2.5" - babel-plugin-dynamic-import-node "^2.3.3" - boxen "^6.2.1" - chalk "^4.1.2" - chokidar "^3.5.3" - clean-css "^5.3.0" - cli-table3 "^0.6.2" - combine-promises "^1.1.0" - commander "^5.1.0" - copy-webpack-plugin "^11.0.0" - core-js "^3.23.3" - css-loader "^6.7.1" - css-minimizer-webpack-plugin "^4.0.0" - cssnano "^5.1.12" - del "^6.1.1" - detect-port "^1.3.0" - escape-html "^1.0.3" - eta "^1.12.3" - file-loader "^6.2.0" - fs-extra "^10.1.0" - html-minifier-terser "^6.1.0" - html-tags "^3.2.0" - html-webpack-plugin "^5.5.0" - import-fresh "^3.3.0" - leven "^3.1.0" - lodash "^4.17.21" - mini-css-extract-plugin "^2.6.1" - postcss "^8.4.14" - postcss-loader "^7.0.0" - prompts "^2.4.2" - react-dev-utils "^12.0.1" - react-helmet-async "^1.3.0" - react-loadable "npm:@docusaurus/react-loadable@5.5.2" - react-loadable-ssr-addon-v5-slorber "^1.0.1" - react-router "^5.3.3" - react-router-config "^5.1.1" - react-router-dom "^5.3.3" - rtl-detect "^1.0.4" - semver "^7.3.7" - serve-handler "^6.1.3" - shelljs "^0.8.5" - terser-webpack-plugin "^5.3.3" - tslib "^2.4.0" - update-notifier "^5.1.0" - url-loader "^4.1.1" - wait-on "^6.0.1" - webpack "^5.73.0" - webpack-bundle-analyzer "^4.5.0" - webpack-dev-server "^4.9.3" - webpack-merge "^5.8.0" - webpackbar "^5.0.2" + "@algolia/autocomplete-core" "1.9.3" + "@algolia/autocomplete-preset-algolia" "1.9.3" + "@docsearch/css" "3.5.2" + algoliasearch "^4.19.1" -"@docusaurus/core@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-2.3.1.tgz#32849f2ffd2f086a4e55739af8c4195c5eb386f2" - integrity sha512-0Jd4jtizqnRAr7svWaBbbrCCN8mzBNd2xFLoT/IM7bGfFie5y58oz97KzXliwiLY3zWjqMXjQcuP1a5VgCv2JA== +"@docusaurus/core@2.4.1", "@docusaurus/core@^2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-2.4.1.tgz#4b8ff5766131ce3fbccaad0b1daf2ad4dc76f62d" + integrity sha512-SNsY7PshK3Ri7vtsLXVeAJGS50nJN3RgF836zkyUfAD01Fq+sAk5EwWgLw+nnm5KVNGDu7PRR2kRGDsWvqpo0g== dependencies: "@babel/core" "^7.18.6" "@babel/generator" "^7.18.7" @@ 
-1385,13 +1346,13 @@ "@babel/runtime" "^7.18.6" "@babel/runtime-corejs3" "^7.18.6" "@babel/traverse" "^7.18.8" - "@docusaurus/cssnano-preset" "2.3.1" - "@docusaurus/logger" "2.3.1" - "@docusaurus/mdx-loader" "2.3.1" + "@docusaurus/cssnano-preset" "2.4.1" + "@docusaurus/logger" "2.4.1" + "@docusaurus/mdx-loader" "2.4.1" "@docusaurus/react-loadable" "5.5.2" - "@docusaurus/utils" "2.3.1" - "@docusaurus/utils-common" "2.3.1" - "@docusaurus/utils-validation" "2.3.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-common" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" "@slorber/static-site-generator-webpack-plugin" "^4.0.7" "@svgr/webpack" "^6.2.1" autoprefixer "^10.4.7" @@ -1447,85 +1408,44 @@ webpack-merge "^5.8.0" webpackbar "^5.0.2" -"@docusaurus/cssnano-preset@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.2.0.tgz#fc05044659051ae74ab4482afcf4a9936e81d523" - integrity sha512-mAAwCo4n66TMWBH1kXnHVZsakW9VAXJzTO4yZukuL3ro4F+JtkMwKfh42EG75K/J/YIFQG5I/Bzy0UH/hFxaTg== - dependencies: - cssnano-preset-advanced "^5.3.8" - postcss "^8.4.14" - postcss-sort-media-queries "^4.2.1" - tslib "^2.4.0" - -"@docusaurus/cssnano-preset@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.3.1.tgz#e042487655e3e062417855e12edb3f6eee8f5ecb" - integrity sha512-7mIhAROES6CY1GmCjR4CZkUfjTL6B3u6rKHK0ChQl2d1IevYXq/k/vFgvOrJfcKxiObpMnE9+X6R2Wt1KqxC6w== +"@docusaurus/cssnano-preset@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.4.1.tgz#eacadefb1e2e0f59df3467a0fe83e4ff79eed163" + integrity sha512-ka+vqXwtcW1NbXxWsh6yA1Ckii1klY9E53cJ4O9J09nkMBgrNX3iEFED1fWdv8wf4mJjvGi5RLZ2p9hJNjsLyQ== dependencies: cssnano-preset-advanced "^5.3.8" postcss "^8.4.14" postcss-sort-media-queries "^4.2.1" tslib "^2.4.0" -"@docusaurus/logger@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/logger/-/logger-2.2.0.tgz#ea2f7feda7b8675485933b87f06d9c976d17423f" - integrity sha512-DF3j1cA5y2nNsu/vk8AG7xwpZu6f5MKkPPMaaIbgXLnWGfm6+wkOeW7kNrxnM95YOhKUkJUophX69nGUnLsm0A== - dependencies: - chalk "^4.1.2" - tslib "^2.4.0" - -"@docusaurus/logger@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/logger/-/logger-2.3.1.tgz#d76aefb452e3734b4e0e645efc6cbfc0aae52869" - integrity sha512-2lAV/olKKVr9qJhfHFCaqBIl8FgYjbUFwgUnX76+cULwQYss+42ZQ3grHGFvI0ocN2X55WcYe64ellQXz7suqg== +"@docusaurus/logger@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/logger/-/logger-2.4.1.tgz#4d2c0626b40752641f9fdd93ad9b5a7a0792f767" + integrity sha512-5h5ysIIWYIDHyTVd8BjheZmQZmEgWDR54aQ1BX9pjFfpyzFo5puKXKYrYJXbjEHGyVhEzmB9UXwbxGfaZhOjcg== dependencies: chalk "^4.1.2" tslib "^2.4.0" -"@docusaurus/lqip-loader@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/lqip-loader/-/lqip-loader-2.2.0.tgz#46ccf0da970cd7817c885e09ff14d8bccd64d434" - integrity sha512-nER3YokwkkNA1c2bAQzyqH8nfdbzkNtgwbuWvwnNuxW6kyhMopLcIy9qQYpSzcA0S7xcfj7+ysdB/DX7+nWoMw== +"@docusaurus/lqip-loader@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/lqip-loader/-/lqip-loader-2.4.1.tgz#5e6a279982af898e646f042097fc4053fa15b4b8" + integrity sha512-XJ0z/xSx5HtAQ+/xBoAiRZ7DY9zEP6IImAKlAk6RxuFzyB4HT8eINWN+LwLnOsTh5boIj37JCX+T76bH0ieULA== dependencies: - "@docusaurus/logger" "2.2.0" + "@docusaurus/logger" "2.4.1" file-loader "^6.2.0" lodash "^4.17.21" sharp "^0.30.7" tslib "^2.4.0" 
-"@docusaurus/mdx-loader@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.2.0.tgz#fd558f429e5d9403d284bd4214e54d9768b041a0" - integrity sha512-X2bzo3T0jW0VhUU+XdQofcEeozXOTmKQMvc8tUnWRdTnCvj4XEcBVdC3g+/jftceluiwSTNRAX4VBOJdNt18jA== - dependencies: - "@babel/parser" "^7.18.8" - "@babel/traverse" "^7.18.8" - "@docusaurus/logger" "2.2.0" - "@docusaurus/utils" "2.2.0" - "@mdx-js/mdx" "^1.6.22" - escape-html "^1.0.3" - file-loader "^6.2.0" - fs-extra "^10.1.0" - image-size "^1.0.1" - mdast-util-to-string "^2.0.0" - remark-emoji "^2.2.0" - stringify-object "^3.3.0" - tslib "^2.4.0" - unified "^9.2.2" - unist-util-visit "^2.0.3" - url-loader "^4.1.1" - webpack "^5.73.0" - -"@docusaurus/mdx-loader@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.3.1.tgz#7ec6acee5eff0a280e1b399ea4dd690b15a793f7" - integrity sha512-Gzga7OsxQRpt3392K9lv/bW4jGppdLFJh3luKRknCKSAaZrmVkOQv2gvCn8LAOSZ3uRg5No7AgYs/vpL8K94lA== +"@docusaurus/mdx-loader@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.4.1.tgz#6425075d7fc136dbfdc121349060cedd64118393" + integrity sha512-4KhUhEavteIAmbBj7LVFnrVYDiU51H5YWW1zY6SmBSte/YLhDutztLTBE0PQl1Grux1jzUJeaSvAzHpTn6JJDQ== dependencies: "@babel/parser" "^7.18.8" "@babel/traverse" "^7.18.8" - "@docusaurus/logger" "2.3.1" - "@docusaurus/utils" "2.3.1" + "@docusaurus/logger" "2.4.1" + "@docusaurus/utils" "2.4.1" "@mdx-js/mdx" "^1.6.22" escape-html "^1.0.3" file-loader "^6.2.0" @@ -1540,27 +1460,13 @@ url-loader "^4.1.1" webpack "^5.73.0" -"@docusaurus/module-type-aliases@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/module-type-aliases/-/module-type-aliases-2.2.0.tgz#1e23e54a1bbb6fde1961e4fa395b1b69f4803ba5" - integrity sha512-wDGW4IHKoOr9YuJgy7uYuKWrDrSpsUSDHLZnWQYM9fN7D5EpSmYHjFruUpKWVyxLpD/Wh0rW8hYZwdjJIQUQCQ== - dependencies: - "@docusaurus/react-loadable" "5.5.2" - "@docusaurus/types" "2.2.0" - "@types/history" "^4.7.11" - "@types/react" "*" - "@types/react-router-config" "*" - "@types/react-router-dom" "*" - react-helmet-async "*" - react-loadable "npm:@docusaurus/react-loadable@5.5.2" - -"@docusaurus/module-type-aliases@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/module-type-aliases/-/module-type-aliases-2.3.1.tgz#986186200818fed999be2e18d6c698eaf4683a33" - integrity sha512-6KkxfAVOJqIUynTRb/tphYCl+co3cP0PlHiMDbi+SzmYxMdgIrwYqH9yAnGSDoN6Jk2ZE/JY/Azs/8LPgKP48A== +"@docusaurus/module-type-aliases@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/module-type-aliases/-/module-type-aliases-2.4.1.tgz#38b3c2d2ae44bea6d57506eccd84280216f0171c" + integrity sha512-gLBuIFM8Dp2XOCWffUDSjtxY7jQgKvYujt7Mx5s4FCTfoL5dN1EVbnrn+O2Wvh8b0a77D57qoIDY7ghgmatR1A== dependencies: "@docusaurus/react-loadable" "5.5.2" - "@docusaurus/types" "2.3.1" + "@docusaurus/types" "2.4.1" "@types/history" "^4.7.11" "@types/react" "*" "@types/react-router-config" "*" @@ -1568,18 +1474,18 @@ react-helmet-async "*" react-loadable "npm:@docusaurus/react-loadable@5.5.2" -"@docusaurus/plugin-content-blog@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.2.0.tgz#dc55982e76771f4e678ac10e26d10e1da2011dc1" - integrity sha512-0mWBinEh0a5J2+8ZJXJXbrCk1tSTNf7Nm4tYAl5h2/xx+PvH/Bnu0V+7mMljYm/1QlDYALNIIaT/JcoZQFUN3w== - dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/logger" "2.2.0" - 
"@docusaurus/mdx-loader" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils" "2.2.0" - "@docusaurus/utils-common" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" +"@docusaurus/plugin-content-blog@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.4.1.tgz#c705a8b1a36a34f181dcf43b7770532e4dcdc4a3" + integrity sha512-E2i7Knz5YIbE1XELI6RlTnZnGgS52cUO4BlCiCUCvQHbR+s1xeIWz4C6BtaVnlug0Ccz7nFSksfwDpVlkujg5Q== + dependencies: + "@docusaurus/core" "2.4.1" + "@docusaurus/logger" "2.4.1" + "@docusaurus/mdx-loader" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-common" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" cheerio "^1.0.0-rc.12" feed "^4.2.2" fs-extra "^10.1.0" @@ -1590,18 +1496,18 @@ utility-types "^3.10.0" webpack "^5.73.0" -"@docusaurus/plugin-content-docs@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.2.0.tgz#0fcb85226fcdb80dc1e2d4a36ef442a650dcc84d" - integrity sha512-BOazBR0XjzsHE+2K1wpNxz5QZmrJgmm3+0Re0EVPYFGW8qndCWGNtXW/0lGKhecVPML8yyFeAmnUCIs7xM2wPw== - dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/logger" "2.2.0" - "@docusaurus/mdx-loader" "2.2.0" - "@docusaurus/module-type-aliases" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" +"@docusaurus/plugin-content-docs@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.4.1.tgz#ed94d9721b5ce7a956fb01cc06c40d8eee8dfca7" + integrity sha512-Lo7lSIcpswa2Kv4HEeUcGYqaasMUQNpjTXpV0N8G6jXgZaQurqp7E8NGYeGbDXnb48czmHWbzDL4S3+BbK0VzA== + dependencies: + "@docusaurus/core" "2.4.1" + "@docusaurus/logger" "2.4.1" + "@docusaurus/mdx-loader" "2.4.1" + "@docusaurus/module-type-aliases" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" "@types/react-router-config" "^5.0.6" combine-promises "^1.1.0" fs-extra "^10.1.0" @@ -1612,123 +1518,112 @@ utility-types "^3.10.0" webpack "^5.73.0" -"@docusaurus/plugin-content-docs@^2.1.0": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.3.1.tgz#feae1555479558a55182f22f8a07acc5e0d7444d" - integrity sha512-DxztTOBEruv7qFxqUtbsqXeNcHqcVEIEe+NQoI1oi2DBmKBhW/o0MIal8lt+9gvmpx3oYtlwmLOOGepxZgJGkw== - dependencies: - "@docusaurus/core" "2.3.1" - "@docusaurus/logger" "2.3.1" - "@docusaurus/mdx-loader" "2.3.1" - "@docusaurus/module-type-aliases" "2.3.1" - "@docusaurus/types" "2.3.1" - "@docusaurus/utils" "2.3.1" - "@docusaurus/utils-validation" "2.3.1" - "@types/react-router-config" "^5.0.6" - combine-promises "^1.1.0" +"@docusaurus/plugin-content-pages@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.4.1.tgz#c534f7e49967699a45bbe67050d1605ebbf3d285" + integrity sha512-/UjuH/76KLaUlL+o1OvyORynv6FURzjurSjvn2lbWTFc4tpYY2qLYTlKpTCBVPhlLUQsfyFnshEJDLmPneq2oA== + dependencies: + "@docusaurus/core" "2.4.1" + "@docusaurus/mdx-loader" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" fs-extra "^10.1.0" - import-fresh "^3.3.0" - js-yaml "^4.1.0" - lodash "^4.17.21" tslib "^2.4.0" - utility-types "^3.10.0" webpack "^5.73.0" -"@docusaurus/plugin-content-pages@2.2.0": - version "2.2.0" - resolved 
"https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.2.0.tgz#e3f40408787bbe229545dd50595f87e1393bc3ae" - integrity sha512-+OTK3FQHk5WMvdelz8v19PbEbx+CNT6VSpx7nVOvMNs5yJCKvmqBJBQ2ZSxROxhVDYn+CZOlmyrC56NSXzHf6g== - dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/mdx-loader" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" +"@docusaurus/plugin-debug@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-2.4.1.tgz#461a2c77b0c5a91b2c05257c8f9585412aaa59dc" + integrity sha512-7Yu9UPzRShlrH/G8btOpR0e6INFZr0EegWplMjOqelIwAcx3PKyR8mgPTxGTxcqiYj6hxSCRN0D8R7YrzImwNA== + dependencies: + "@docusaurus/core" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils" "2.4.1" fs-extra "^10.1.0" + react-json-view "^1.21.3" tslib "^2.4.0" - webpack "^5.73.0" -"@docusaurus/plugin-debug@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-2.2.0.tgz#b38741d2c492f405fee01ee0ef2e0029cedb689a" - integrity sha512-p9vOep8+7OVl6r/NREEYxf4HMAjV8JMYJ7Bos5fCFO0Wyi9AZEo0sCTliRd7R8+dlJXZEgcngSdxAUo/Q+CJow== +"@docusaurus/plugin-google-analytics@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.4.1.tgz#30de1c35773bf9d52bb2d79b201b23eb98022613" + integrity sha512-dyZJdJiCoL+rcfnm0RPkLt/o732HvLiEwmtoNzOoz9MSZz117UH2J6U2vUDtzUzwtFLIf32KkeyzisbwUCgcaQ== dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils" "2.2.0" - fs-extra "^10.1.0" - react-json-view "^1.21.3" + "@docusaurus/core" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" tslib "^2.4.0" -"@docusaurus/plugin-google-analytics@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.2.0.tgz#63c7137eff5a1208d2059fea04b5207c037d7954" - integrity sha512-+eZVVxVeEnV5nVQJdey9ZsfyEVMls6VyWTIj8SmX0k5EbqGvnIfET+J2pYEuKQnDIHxy+syRMoRM6AHXdHYGIg== +"@docusaurus/plugin-google-gtag@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.4.1.tgz#6a3eb91022714735e625c7ca70ef5188fa7bd0dc" + integrity sha512-mKIefK+2kGTQBYvloNEKtDmnRD7bxHLsBcxgnbt4oZwzi2nxCGjPX6+9SQO2KCN5HZbNrYmGo5GJfMgoRvy6uA== dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" + "@docusaurus/core" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" tslib "^2.4.0" -"@docusaurus/plugin-google-gtag@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.2.0.tgz#7b086d169ac5fe9a88aca10ab0fd2bf00c6c6b12" - integrity sha512-6SOgczP/dYdkqUMGTRqgxAS1eTp6MnJDAQMy8VCF1QKbWZmlkx4agHDexihqmYyCujTYHqDAhm1hV26EET54NQ== +"@docusaurus/plugin-google-tag-manager@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-2.4.1.tgz#b99f71aec00b112bbf509ef2416e404a95eb607e" + integrity sha512-Zg4Ii9CMOLfpeV2nG74lVTWNtisFaH9QNtEw48R5QE1KIwDBdTVaiSA18G1EujZjrzJJzXN79VhINSbOJO/r3g== dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" + "@docusaurus/core" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" tslib "^2.4.0" 
-"@docusaurus/plugin-ideal-image@^2.1.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-2.2.0.tgz#bd2fc4d8f8c4a4526b288297d892cb76e61e1382" - integrity sha512-1tnyPotxLEglt497nmccxWOpAA6ulMf4nc2k8tReEljmwHebefWV9wbK1RK/44Na8GiGb709/Zs+HHdNRied8w== +"@docusaurus/plugin-ideal-image@^2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-2.4.1.tgz#110e9814ad3af66235c849d2e00c9e84f552c961" + integrity sha512-jxvgCGPmHxdae2Y2uskzxIbMCA4WLTfzkufsLbD4mEAjCRIkt6yzux6q5kqKTrO+AxzpANVcJNGmaBtKZGv5aw== dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/lqip-loader" "2.2.0" + "@docusaurus/core" "2.4.1" + "@docusaurus/lqip-loader" "2.4.1" "@docusaurus/responsive-loader" "^1.7.0" - "@docusaurus/theme-translations" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" + "@docusaurus/theme-translations" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" "@endiliey/react-ideal-image" "^0.0.11" react-waypoint "^10.3.0" sharp "^0.30.7" tslib "^2.4.0" webpack "^5.73.0" -"@docusaurus/plugin-sitemap@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.2.0.tgz#876da60937886032d63143253d420db6a4b34773" - integrity sha512-0jAmyRDN/aI265CbWZNZuQpFqiZuo+5otk2MylU9iVrz/4J7gSc+ZJ9cy4EHrEsW7PV8s1w18hIEsmcA1YgkKg== - dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/logger" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils" "2.2.0" - "@docusaurus/utils-common" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" +"@docusaurus/plugin-sitemap@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.4.1.tgz#8a7a76ed69dc3e6b4474b6abb10bb03336a9de6d" + integrity sha512-lZx+ijt/+atQ3FVE8FOHV/+X3kuok688OydDXrqKRJyXBJZKgGjA2Qa8RjQ4f27V2woaXhtnyrdPop/+OjVMRg== + dependencies: + "@docusaurus/core" "2.4.1" + "@docusaurus/logger" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-common" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" fs-extra "^10.1.0" sitemap "^7.1.1" tslib "^2.4.0" -"@docusaurus/preset-classic@^2.1.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-2.2.0.tgz#bece5a043eeb74430f7c6c7510000b9c43669eb7" - integrity sha512-yKIWPGNx7BT8v2wjFIWvYrS+nvN04W+UameSFf8lEiJk6pss0kL6SG2MRvyULiI3BDxH+tj6qe02ncpSPGwumg== - dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/plugin-content-blog" "2.2.0" - "@docusaurus/plugin-content-docs" "2.2.0" - "@docusaurus/plugin-content-pages" "2.2.0" - "@docusaurus/plugin-debug" "2.2.0" - "@docusaurus/plugin-google-analytics" "2.2.0" - "@docusaurus/plugin-google-gtag" "2.2.0" - "@docusaurus/plugin-sitemap" "2.2.0" - "@docusaurus/theme-classic" "2.2.0" - "@docusaurus/theme-common" "2.2.0" - "@docusaurus/theme-search-algolia" "2.2.0" - "@docusaurus/types" "2.2.0" +"@docusaurus/preset-classic@^2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-2.4.1.tgz#072f22d0332588e9c5f512d4bded8d7c99f91497" + integrity sha512-P4//+I4zDqQJ+UDgoFrjIFaQ1MeS9UD1cvxVQaI6O7iBmiHQm0MGROP1TbE7HlxlDPXFJjZUK3x3cAoK63smGQ== + dependencies: + "@docusaurus/core" "2.4.1" + "@docusaurus/plugin-content-blog" "2.4.1" + "@docusaurus/plugin-content-docs" "2.4.1" + "@docusaurus/plugin-content-pages" "2.4.1" + "@docusaurus/plugin-debug" "2.4.1" + 
"@docusaurus/plugin-google-analytics" "2.4.1" + "@docusaurus/plugin-google-gtag" "2.4.1" + "@docusaurus/plugin-google-tag-manager" "2.4.1" + "@docusaurus/plugin-sitemap" "2.4.1" + "@docusaurus/theme-classic" "2.4.1" + "@docusaurus/theme-common" "2.4.1" + "@docusaurus/theme-search-algolia" "2.4.1" + "@docusaurus/types" "2.4.1" "@docusaurus/react-loadable@5.5.2", "react-loadable@npm:@docusaurus/react-loadable@5.5.2": version "5.5.2" @@ -1745,27 +1640,27 @@ dependencies: loader-utils "^2.0.0" -"@docusaurus/theme-classic@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-2.2.0.tgz#a048bb1bc077dee74b28bec25f4b84b481863742" - integrity sha512-kjbg/qJPwZ6H1CU/i9d4l/LcFgnuzeiGgMQlt6yPqKo0SOJIBMPuz7Rnu3r/WWbZFPi//o8acclacOzmXdUUEg== - dependencies: - "@docusaurus/core" "2.2.0" - "@docusaurus/mdx-loader" "2.2.0" - "@docusaurus/module-type-aliases" "2.2.0" - "@docusaurus/plugin-content-blog" "2.2.0" - "@docusaurus/plugin-content-docs" "2.2.0" - "@docusaurus/plugin-content-pages" "2.2.0" - "@docusaurus/theme-common" "2.2.0" - "@docusaurus/theme-translations" "2.2.0" - "@docusaurus/types" "2.2.0" - "@docusaurus/utils" "2.2.0" - "@docusaurus/utils-common" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" +"@docusaurus/theme-classic@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-2.4.1.tgz#0060cb263c1a73a33ac33f79bb6bc2a12a56ad9e" + integrity sha512-Rz0wKUa+LTW1PLXmwnf8mn85EBzaGSt6qamqtmnh9Hflkc+EqiYMhtUJeLdV+wsgYq4aG0ANc+bpUDpsUhdnwg== + dependencies: + "@docusaurus/core" "2.4.1" + "@docusaurus/mdx-loader" "2.4.1" + "@docusaurus/module-type-aliases" "2.4.1" + "@docusaurus/plugin-content-blog" "2.4.1" + "@docusaurus/plugin-content-docs" "2.4.1" + "@docusaurus/plugin-content-pages" "2.4.1" + "@docusaurus/theme-common" "2.4.1" + "@docusaurus/theme-translations" "2.4.1" + "@docusaurus/types" "2.4.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-common" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" "@mdx-js/react" "^1.6.22" clsx "^1.2.1" copy-text-to-clipboard "^3.0.1" - infima "0.2.0-alpha.42" + infima "0.2.0-alpha.43" lodash "^4.17.21" nprogress "^0.2.0" postcss "^8.4.14" @@ -1776,17 +1671,18 @@ tslib "^2.4.0" utility-types "^3.10.0" -"@docusaurus/theme-common@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-2.2.0.tgz#2303498d80448aafdd588b597ce9d6f4cfa930e4" - integrity sha512-R8BnDjYoN90DCL75gP7qYQfSjyitXuP9TdzgsKDmSFPNyrdE3twtPNa2dIN+h+p/pr+PagfxwWbd6dn722A1Dw== - dependencies: - "@docusaurus/mdx-loader" "2.2.0" - "@docusaurus/module-type-aliases" "2.2.0" - "@docusaurus/plugin-content-blog" "2.2.0" - "@docusaurus/plugin-content-docs" "2.2.0" - "@docusaurus/plugin-content-pages" "2.2.0" - "@docusaurus/utils" "2.2.0" +"@docusaurus/theme-common@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-2.4.1.tgz#03e16f7aa96455e952f3243ac99757b01a3c83d4" + integrity sha512-G7Zau1W5rQTaFFB3x3soQoZpkgMbl/SYNG8PfMFIjKa3M3q8n0m/GRf5/H/e5BqOvt8c+ZWIXGCiz+kUCSHovA== + dependencies: + "@docusaurus/mdx-loader" "2.4.1" + "@docusaurus/module-type-aliases" "2.4.1" + "@docusaurus/plugin-content-blog" "2.4.1" + "@docusaurus/plugin-content-docs" "2.4.1" + "@docusaurus/plugin-content-pages" "2.4.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-common" "2.4.1" "@types/history" "^4.7.11" "@types/react" "*" "@types/react-router-config" "*" @@ -1794,56 +1690,43 @@ parse-numeric-range "^1.3.0" 
prism-react-renderer "^1.3.5" tslib "^2.4.0" + use-sync-external-store "^1.2.0" utility-types "^3.10.0" -"@docusaurus/theme-search-algolia@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.2.0.tgz#77fd9f7a600917e6024fe3ac7fb6cfdf2ce84737" - integrity sha512-2h38B0tqlxgR2FZ9LpAkGrpDWVdXZ7vltfmTdX+4RsDs3A7khiNsmZB+x/x6sA4+G2V2CvrsPMlsYBy5X+cY1w== +"@docusaurus/theme-search-algolia@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.4.1.tgz#906bd2cca3fced0241985ef502c892f58ff380fc" + integrity sha512-6BcqW2lnLhZCXuMAvPRezFs1DpmEKzXFKlYjruuas+Xy3AQeFzDJKTJFIm49N77WFCTyxff8d3E4Q9pi/+5McQ== dependencies: "@docsearch/react" "^3.1.1" - "@docusaurus/core" "2.2.0" - "@docusaurus/logger" "2.2.0" - "@docusaurus/plugin-content-docs" "2.2.0" - "@docusaurus/theme-common" "2.2.0" - "@docusaurus/theme-translations" "2.2.0" - "@docusaurus/utils" "2.2.0" - "@docusaurus/utils-validation" "2.2.0" + "@docusaurus/core" "2.4.1" + "@docusaurus/logger" "2.4.1" + "@docusaurus/plugin-content-docs" "2.4.1" + "@docusaurus/theme-common" "2.4.1" + "@docusaurus/theme-translations" "2.4.1" + "@docusaurus/utils" "2.4.1" + "@docusaurus/utils-validation" "2.4.1" algoliasearch "^4.13.1" algoliasearch-helper "^3.10.0" clsx "^1.2.1" - eta "^1.12.3" + eta "^2.0.0" fs-extra "^10.1.0" lodash "^4.17.21" tslib "^2.4.0" utility-types "^3.10.0" -"@docusaurus/theme-translations@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-translations/-/theme-translations-2.2.0.tgz#5fbd4693679806f80c26eeae1381e1f2c23d83e7" - integrity sha512-3T140AG11OjJrtKlY4pMZ5BzbGRDjNs2co5hJ6uYJG1bVWlhcaFGqkaZ5lCgKflaNHD7UHBHU9Ec5f69jTdd6w== +"@docusaurus/theme-translations@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-translations/-/theme-translations-2.4.1.tgz#4d49df5865dae9ef4b98a19284ede62ae6f98726" + integrity sha512-T1RAGP+f86CA1kfE8ejZ3T3pUU3XcyvrGMfC/zxCtc2BsnoexuNI9Vk2CmuKCb+Tacvhxjv5unhxXce0+NKyvA== dependencies: fs-extra "^10.1.0" tslib "^2.4.0" -"@docusaurus/types@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-2.2.0.tgz#02c577a4041ab7d058a3c214ccb13647e21a9857" - integrity sha512-b6xxyoexfbRNRI8gjblzVOnLr4peCJhGbYGPpJ3LFqpi5nsFfoK4mmDLvWdeah0B7gmJeXabN7nQkFoqeSdmOw== - dependencies: - "@types/history" "^4.7.11" - "@types/react" "*" - commander "^5.1.0" - joi "^17.6.0" - react-helmet-async "^1.3.0" - utility-types "^3.10.0" - webpack "^5.73.0" - webpack-merge "^5.8.0" - -"@docusaurus/types@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-2.3.1.tgz#785ade2e0f4e35e1eb7fb0d04c27d11c3991a2e8" - integrity sha512-PREbIRhTaNNY042qmfSE372Jb7djZt+oVTZkoqHJ8eff8vOIc2zqqDqBVc5BhOfpZGPTrE078yy/torUEZy08A== +"@docusaurus/types@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-2.4.1.tgz#d8e82f9e0f704984f98df1f93d6b4554d5458705" + integrity sha512-0R+cbhpMkhbRXX138UOc/2XZFF8hiZa6ooZAEEJFp5scytzCw4tC1gChMFXrpa3d2tYE6AX8IrOEpSonLmfQuQ== dependencies: "@types/history" "^4.7.11" "@types/react" "*" @@ -1854,69 +1737,30 @@ webpack "^5.73.0" webpack-merge "^5.8.0" -"@docusaurus/utils-common@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-2.2.0.tgz#a401c1b93a8697dd566baf6ac64f0fdff1641a78" - integrity 
sha512-qebnerHp+cyovdUseDQyYFvMW1n1nv61zGe5JJfoNQUnjKuApch3IVsz+/lZ9a38pId8kqehC1Ao2bW/s0ntDA== - dependencies: - tslib "^2.4.0" - -"@docusaurus/utils-common@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-2.3.1.tgz#1abe66846eb641547e4964d44f3011938e58e50b" - integrity sha512-pVlRpXkdNcxmKNxAaB1ya2hfCEvVsLDp2joeM6K6uv55Oc5nVIqgyYSgSNKZyMdw66NnvMfsu0RBylcwZQKo9A== - dependencies: - tslib "^2.4.0" - -"@docusaurus/utils-validation@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.2.0.tgz#04d4d103137ad0145883971d3aa497f4a1315f25" - integrity sha512-I1hcsG3yoCkasOL5qQAYAfnmVoLei7apugT6m4crQjmDGxq+UkiRrq55UqmDDyZlac/6ax/JC0p+usZ6W4nVyg== +"@docusaurus/utils-common@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-2.4.1.tgz#7f72e873e49bd5179588869cc3ab7449a56aae63" + integrity sha512-bCVGdZU+z/qVcIiEQdyx0K13OC5mYwxhSuDUR95oFbKVuXYRrTVrwZIqQljuo1fyJvFTKHiL9L9skQOPokuFNQ== dependencies: - "@docusaurus/logger" "2.2.0" - "@docusaurus/utils" "2.2.0" - joi "^17.6.0" - js-yaml "^4.1.0" tslib "^2.4.0" -"@docusaurus/utils-validation@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.3.1.tgz#b65c718ba9b84b7a891bccf5ac6d19b57ee7d887" - integrity sha512-7n0208IG3k1HVTByMHlZoIDjjOFC8sbViHVXJx0r3Q+3Ezrx+VQ1RZ/zjNn6lT+QBCRCXlnlaoJ8ug4HIVgQ3w== +"@docusaurus/utils-validation@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.4.1.tgz#19959856d4a886af0c5cfb357f4ef68b51151244" + integrity sha512-unII3hlJlDwZ3w8U+pMO3Lx3RhI4YEbY3YNsQj4yzrkZzlpqZOLuAiZK2JyULnD+TKbceKU0WyWkQXtYbLNDFA== dependencies: - "@docusaurus/logger" "2.3.1" - "@docusaurus/utils" "2.3.1" + "@docusaurus/logger" "2.4.1" + "@docusaurus/utils" "2.4.1" joi "^17.6.0" js-yaml "^4.1.0" tslib "^2.4.0" -"@docusaurus/utils@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.2.0.tgz#3d6f9b7a69168d5c92d371bf21c556a4f50d1da6" - integrity sha512-oNk3cjvx7Tt1Lgh/aeZAmFpGV2pDr5nHKrBVx6hTkzGhrnMuQqLt6UPlQjdYQ3QHXwyF/ZtZMO1D5Pfi0lu7SA== - dependencies: - "@docusaurus/logger" "2.2.0" - "@svgr/webpack" "^6.2.1" - file-loader "^6.2.0" - fs-extra "^10.1.0" - github-slugger "^1.4.0" - globby "^11.1.0" - gray-matter "^4.0.3" - js-yaml "^4.1.0" - lodash "^4.17.21" - micromatch "^4.0.5" - resolve-pathname "^3.0.0" - shelljs "^0.8.5" - tslib "^2.4.0" - url-loader "^4.1.1" - webpack "^5.73.0" - -"@docusaurus/utils@2.3.1": - version "2.3.1" - resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.3.1.tgz#24b9cae3a23b1e6dc88f95c45722c7e82727b032" - integrity sha512-9WcQROCV0MmrpOQDXDGhtGMd52DHpSFbKLfkyaYumzbTstrbA5pPOtiGtxK1nqUHkiIv8UwexS54p0Vod2I1lg== +"@docusaurus/utils@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.4.1.tgz#9c5f76eae37b71f3819c1c1f0e26e6807c99a4fc" + integrity sha512-1lvEZdAQhKNht9aPXPoh69eeKnV0/62ROhQeFKKxmzd0zkcuE/Oc5Gpnt00y/f5bIsmOsYMY7Pqfm/5rteT5GA== dependencies: - "@docusaurus/logger" "2.3.1" + "@docusaurus/logger" "2.4.1" "@svgr/webpack" "^6.2.1" escape-string-regexp "^4.0.0" file-loader "^6.2.0" @@ -1981,13 +1825,13 @@ tslib "~2.1.0" "@graphql-tools/import@^6.2.6": - version "6.5.6" - resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-6.5.6.tgz#9ea073d7fb4fdd177ecc2df4c553b61db9d55fcb" - integrity 
sha512-SxCpNhN3sIZM4wsMjQWXKkff/CBn7+WHoZ9OjZkdV5nxGbnzRKh5SZAAsvAFuj6Kst5Y9mlAaiwy+QufZZ1F1w== + version "6.7.18" + resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-6.7.18.tgz#ad092d8a4546bb6ffc3e871e499eec7ac368680b" + integrity sha512-XQDdyZTp+FYmT7as3xRWH/x8dx0QZA2WZqfMF5EWb36a0PiH7WwlRQYIdyYXj8YCLpiWkeBXgBRHmMnwEYR8iQ== dependencies: - "@graphql-tools/utils" "8.5.0" + "@graphql-tools/utils" "^9.2.1" resolve-from "5.0.0" - tslib "~2.3.0" + tslib "^2.4.0" "@graphql-tools/json-file-loader@^6.2.6": version "6.2.6" @@ -2012,6 +1856,14 @@ unixify "1.0.0" valid-url "1.0.9" +"@graphql-tools/merge@8.3.1": + version "8.3.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-8.3.1.tgz#06121942ad28982a14635dbc87b5d488a041d722" + integrity sha512-BMm99mqdNZbEYeTPK3it9r9S6rsZsQKtlqJsSBknAclXq2pGEfOxjcIZi+kBSkHZKPKCRrYDd5vY0+rUmIHVLg== + dependencies: + "@graphql-tools/utils" "8.9.0" + tslib "^2.4.0" + "@graphql-tools/merge@^6.2.12": version "6.2.17" resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-6.2.17.tgz#4dedf87d8435a5e1091d7cc8d4f371ed1e029f1f" @@ -2021,14 +1873,6 @@ "@graphql-tools/utils" "8.0.2" tslib "~2.3.0" -"@graphql-tools/merge@^8.2.0": - version "8.2.0" - resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-8.2.0.tgz#8f6638eeecf19f59fef28598024308e0452dd0a2" - integrity sha512-nfMLYF7zczjnIbChZtqbvozRfuRweMD1Fe9HHd4RXd3Tcsj6E17srW0QJfxUoIIWh4pitj+XwZAwhj1PWBDU7g== - dependencies: - "@graphql-tools/utils" "^8.4.0" - tslib "~2.3.0" - "@graphql-tools/schema@^7.1.5": version "7.1.5" resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-7.1.5.tgz#07b24e52b182e736a6b77c829fc48b84d89aa711" @@ -2039,13 +1883,13 @@ value-or-promise "1.0.6" "@graphql-tools/schema@^8.0.2": - version "8.3.0" - resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-8.3.0.tgz#ddf4297859c6d7ac1ebbbd91460260ed424115db" - integrity sha512-OJD4Q1Xa3sffRiHzy0sskZz9ZWeqaujINfoim4CTk5Y9es1LS+WnKi25wVhmL2SGzzmKuAv7oDn+dpQAlM+Gfw== + version "8.5.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-8.5.1.tgz#c2f2ff1448380919a330312399c9471db2580b58" + integrity sha512-0Esilsh0P/qYcB5DKQpiKeQs/jevzIadNTaT0jeWklPMwNbT7yMX4EqZany7mbeRRlSRwMzNzL5olyFdffHBZg== dependencies: - "@graphql-tools/merge" "^8.2.0" - "@graphql-tools/utils" "^8.4.0" - tslib "~2.3.0" + "@graphql-tools/merge" "8.3.1" + "@graphql-tools/utils" "8.9.0" + tslib "^2.4.0" value-or-promise "1.0.11" "@graphql-tools/url-loader@^6.8.2": @@ -2080,12 +1924,12 @@ dependencies: tslib "~2.3.0" -"@graphql-tools/utils@8.5.0", "@graphql-tools/utils@^8.4.0": - version "8.5.0" - resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-8.5.0.tgz#439487ac756d9458a33091e5e0435ddf8e794f3e" - integrity sha512-jMwLm6YdN+Vbqntg5GHqDvGLpLa/xPSpRs/c40d0rBuel77wo7AaQ8jHeBSpp9y+7kp7HrGSWff1u7yJ7F8ppw== +"@graphql-tools/utils@8.9.0": + version "8.9.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-8.9.0.tgz#c6aa5f651c9c99e1aca55510af21b56ec296cdb7" + integrity sha512-pjJIWH0XOVnYGXCqej8g/u/tsfV4LvLlj0eATKQu5zwnxd/TiTHq7Cg313qUPTFFHZ3PP5wJ15chYVtLDwaymg== dependencies: - tslib "~2.3.0" + tslib "^2.4.0" "@graphql-tools/utils@^7.0.0", "@graphql-tools/utils@^7.1.2", "@graphql-tools/utils@^7.5.0", "@graphql-tools/utils@^7.7.0", "@graphql-tools/utils@^7.7.1", "@graphql-tools/utils@^7.8.1", "@graphql-tools/utils@^7.9.0": version "7.10.0" @@ -2096,6 +1940,14 @@ camel-case "4.1.2" tslib "~2.2.0" +"@graphql-tools/utils@^9.2.1": + version 
"9.2.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-9.2.1.tgz#1b3df0ef166cfa3eae706e3518b17d5922721c57" + integrity sha512-WUw506Ql6xzmOORlriNrD6Ugx+HjVgYxt9KCXD9mHAak+eaXSwuGGPyE60hy9xaDEoXKBsG7SkG69ybitaVl6A== + dependencies: + "@graphql-typed-document-node/core" "^3.1.1" + tslib "^2.4.0" + "@graphql-tools/wrap@^7.0.4": version "7.0.8" resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-7.0.8.tgz#ad41e487135ca3ea1ae0ea04bb3f596177fb4f50" @@ -2107,10 +1959,15 @@ tslib "~2.2.0" value-or-promise "1.0.6" +"@graphql-typed-document-node/core@^3.1.1": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== + "@hapi/hoek@^9.0.0": - version "9.2.1" - resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.2.1.tgz#9551142a1980503752536b5050fd99f4a7f13b17" - integrity sha512-gfta+H8aziZsm8pZa0vj04KO6biEiisppNgA1kbJvFrrWu9Vm7eaUEy76DIxsuTaWvti5fkJVhllWc6ZTE+Mdw== + version "9.3.0" + resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb" + integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ== "@hapi/topo@^5.0.0": version "5.1.0" @@ -2119,63 +1976,82 @@ dependencies: "@hapi/hoek" "^9.0.0" -"@jridgewell/gen-mapping@^0.1.0": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" - integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== +"@jest/schemas@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" + integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== dependencies: - "@jridgewell/set-array" "^1.0.0" - "@jridgewell/sourcemap-codec" "^1.4.10" + "@sinclair/typebox" "^0.27.8" + +"@jest/types@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" + integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== + dependencies: + "@jest/schemas" "^29.6.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" "@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" - integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== dependencies: "@jridgewell/set-array" "^1.0.1" "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/resolve-uri@^3.0.3": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity 
sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== -"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": +"@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== -"@jridgewell/source-map@^0.3.2": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" - integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== +"@jridgewell/source-map@^0.3.3": + version "0.3.5" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.5.tgz#a3bb4d5c6825aab0d281268f47f6ad5853431e91" + integrity sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ== dependencies: "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== -"@jridgewell/trace-mapping@^0.3.14", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.15" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" - integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== dependencies: "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.19" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz#f8a3249862f91be48d3127c3cfe992f79b4b8811" + integrity sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@leichtgewicht/ip-codec@^2.0.1": version "2.0.4" resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== "@markprompt/core@^0.4.5": - 
version "0.4.5" - resolved "https://registry.yarnpkg.com/@markprompt/core/-/core-0.4.5.tgz#356b7ad57a893b52076efe073db182a2d94e7fa9" - integrity sha512-HIHzD63KoisI5W7EYlz3b5uukjDBaxV9kIiqCXgPSw5aJnjLOazT2iFAI+bF2UX4xWFu2sE3RSg/JOjKdv15Dw== + version "0.4.6" + resolved "https://registry.yarnpkg.com/@markprompt/core/-/core-0.4.6.tgz#e68ad70704ae41a909c67de511063f3460e3c1d8" + integrity sha512-xqpIcrL9LTbhn0LMQNyVr503rotDvVI6jIJJC+z2h17zFXN3sxPG/sbX7UjR5xPcJuF5zLxzm1fqrBFZALAYxg== "@markprompt/react@^0.3.5": version "0.3.5" @@ -2256,13 +2132,13 @@ "@octokit/types" "^6.0.3" "@octokit/core@^3.5.1": - version "3.5.1" - resolved "https://registry.yarnpkg.com/@octokit/core/-/core-3.5.1.tgz#8601ceeb1ec0e1b1b8217b960a413ed8e947809b" - integrity sha512-omncwpLVxMP+GLpLPgeGJBF6IWJFjXDS5flY5VbppePYX9XehevbDykRH9PdCdvqt9TS5AOTiDide7h0qrkHjw== + version "3.6.0" + resolved "https://registry.yarnpkg.com/@octokit/core/-/core-3.6.0.tgz#3376cb9f3008d9b3d110370d90e0a1fcd5fe6085" + integrity sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q== dependencies: "@octokit/auth-token" "^2.4.4" "@octokit/graphql" "^4.5.8" - "@octokit/request" "^5.6.0" + "@octokit/request" "^5.6.3" "@octokit/request-error" "^2.0.5" "@octokit/types" "^6.0.3" before-after-hook "^2.2.0" @@ -2286,17 +2162,17 @@ "@octokit/types" "^6.0.3" universal-user-agent "^6.0.0" -"@octokit/openapi-types@^11.2.0": - version "11.2.0" - resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-11.2.0.tgz#b38d7fc3736d52a1e96b230c1ccd4a58a2f400a6" - integrity sha512-PBsVO+15KSlGmiI8QAzaqvsNlZlrDlyAJYcrXBCvVUxCp7VnXjkwPoFHgjEJXx3WF9BAwkA6nfCUA7i9sODzKA== +"@octokit/openapi-types@^12.11.0": + version "12.11.0" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-12.11.0.tgz#da5638d64f2b919bca89ce6602d059f1b52d3ef0" + integrity sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ== "@octokit/plugin-paginate-rest@^2.16.8": - version "2.17.0" - resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.17.0.tgz#32e9c7cab2a374421d3d0de239102287d791bce7" - integrity sha512-tzMbrbnam2Mt4AhuyCHvpRkS0oZ5MvwwcQPYGtMv4tUa5kkzG58SVB0fcsLulOZQeRnOgdkZWkRUiyBlh0Bkyw== + version "2.21.3" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz#7f12532797775640dbb8224da577da7dc210c87e" + integrity sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw== dependencies: - "@octokit/types" "^6.34.0" + "@octokit/types" "^6.40.0" "@octokit/plugin-request-log@^1.0.4": version "1.0.4" @@ -2304,11 +2180,11 @@ integrity sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA== "@octokit/plugin-rest-endpoint-methods@^5.12.0": - version "5.13.0" - resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.13.0.tgz#8c46109021a3412233f6f50d28786f8e552427ba" - integrity sha512-uJjMTkN1KaOIgNtUPMtIXDOjx6dGYysdIFhgA52x4xSadQCz3b/zJexvITDVpANnfKPW/+E0xkOvLntqMYpviA== + version "5.16.2" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz#7ee8bf586df97dd6868cf68f641354e908c25342" + integrity sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw== dependencies: - "@octokit/types" "^6.34.0" + "@octokit/types" "^6.39.0" deprecation "^2.3.1" 
"@octokit/plugin-retry@^3.0.9": @@ -2320,9 +2196,9 @@ bottleneck "^2.15.3" "@octokit/plugin-throttling@^3.5.1": - version "3.5.2" - resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-3.5.2.tgz#8b1797a5f14edbca0b8af619394056ed0ed5c9b5" - integrity sha512-Eu7kfJxU8vmHqWGNszWpg+GVp2tnAfax3XQV5CkYPEE69C+KvInJXW9WajgSeW+cxYe0UVdouzCtcreGNuJo7A== + version "3.7.0" + resolved "https://registry.yarnpkg.com/@octokit/plugin-throttling/-/plugin-throttling-3.7.0.tgz#a35cd05de22b2ef13fde45390d983ff8365b9a9e" + integrity sha512-qrKT1Yl/KuwGSC6/oHpLBot3ooC9rq0/ryDYBCpkRtoj+R8T47xTMDT6Tk2CxWopFota/8Pi/2SqArqwC0JPow== dependencies: "@octokit/types" "^6.0.1" bottleneck "^2.15.3" @@ -2336,16 +2212,16 @@ deprecation "^2.0.0" once "^1.4.0" -"@octokit/request@^5.6.0": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.6.2.tgz#1aa74d5da7b9e04ac60ef232edd9a7438dcf32d8" - integrity sha512-je66CvSEVf0jCpRISxkUcCa0UkxmFs6eGDRSbfJtAVwbLH5ceqF+YEyC8lj8ystKyZTy8adWr0qmkY52EfOeLA== +"@octokit/request@^5.6.0", "@octokit/request@^5.6.3": + version "5.6.3" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.6.3.tgz#19a022515a5bba965ac06c9d1334514eb50c48b0" + integrity sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A== dependencies: "@octokit/endpoint" "^6.0.1" "@octokit/request-error" "^2.1.0" "@octokit/types" "^6.16.1" is-plain-object "^5.0.0" - node-fetch "^2.6.1" + node-fetch "^2.6.7" universal-user-agent "^6.0.0" "@octokit/rest@^18.6.2": @@ -2358,195 +2234,235 @@ "@octokit/plugin-request-log" "^1.0.4" "@octokit/plugin-rest-endpoint-methods" "^5.12.0" -"@octokit/types@^6.0.1", "@octokit/types@^6.0.3", "@octokit/types@^6.16.1", "@octokit/types@^6.34.0": - version "6.34.0" - resolved "https://registry.yarnpkg.com/@octokit/types/-/types-6.34.0.tgz#c6021333334d1ecfb5d370a8798162ddf1ae8218" - integrity sha512-s1zLBjWhdEI2zwaoSgyOFoKSl109CUcVBCc7biPJ3aAf6LGLU6szDvi31JPU7bxfla2lqfhjbbg/5DdFNxOwHw== +"@octokit/types@^6.0.1", "@octokit/types@^6.0.3", "@octokit/types@^6.16.1", "@octokit/types@^6.39.0", "@octokit/types@^6.40.0": + version "6.41.0" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-6.41.0.tgz#e58ef78d78596d2fb7df9c6259802464b5f84a04" + integrity sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg== dependencies: - "@octokit/openapi-types" "^11.2.0" + "@octokit/openapi-types" "^12.11.0" "@polka/url@^1.0.0-next.20": version "1.0.0-next.21" resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.21.tgz#5de5a2385a35309427f6011992b544514d559aa1" integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g== -"@radix-ui/primitive@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-1.0.0.tgz#e1d8ef30b10ea10e69c76e896f608d9276352253" - integrity sha512-3e7rn8FDMin4CgeL7Z/49smCA3rFYY3Ha2rUQ7HRWFadS5iCRw08ZgVT1LaNTCNqgvrUiyczLflrVrF0SRQtNA== +"@radix-ui/primitive@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-1.0.1.tgz#e46f9958b35d10e9f6dc71c497305c22e3e55dbd" + integrity sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw== dependencies: "@babel/runtime" "^7.13.10" -"@radix-ui/react-compose-refs@1.0.0": - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.0.tgz#37595b1f16ec7f228d698590e78eeed18ff218ae" - integrity sha512-0KaSv6sx787/hK3eF53iOkiSLwAGlFMx5lotrqD2pTjB18KbybKoEIgkNZTKC60YECDQTKGTRcDBILwZVqVKvA== +"@radix-ui/react-compose-refs@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz#7ed868b66946aa6030e580b1ffca386dd4d21989" + integrity sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw== dependencies: "@babel/runtime" "^7.13.10" -"@radix-ui/react-context@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/react-context/-/react-context-1.0.0.tgz#f38e30c5859a9fb5e9aa9a9da452ee3ed9e0aee0" - integrity sha512-1pVM9RfOQ+n/N5PJK33kRSKsr1glNxomxONs5c49MliinBY6Yw2Q995qfBUUo0/Mbg05B/sGA0gkgPI7kmSHBg== +"@radix-ui/react-context@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-context/-/react-context-1.0.1.tgz#fe46e67c96b240de59187dcb7a1a50ce3e2ec00c" + integrity sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-dialog@^1.0.3": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-dialog/-/react-dialog-1.0.3.tgz#a715bf30f35fcd80476c0a07fcc073c1968e6d3e" - integrity sha512-owNhq36kNPqC2/a+zJRioPg6HHnTn5B/sh/NjTY8r4W9g1L5VJlrzZIVcBr7R9Mg8iLjVmh6MGgMlfoVf/WO/A== + version "1.0.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-dialog/-/react-dialog-1.0.4.tgz#06bce6c16bb93eb36d7a8589e665a20f4c1c52c1" + integrity sha512-hJtRy/jPULGQZceSAP2Re6/4NpKo8im6V8P2hUqZsdFiSL8l35kYsw3qbRI6Ay5mQd2+wlLqje770eq+RJ3yZg== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/primitive" "1.0.0" - "@radix-ui/react-compose-refs" "1.0.0" - "@radix-ui/react-context" "1.0.0" - "@radix-ui/react-dismissable-layer" "1.0.3" - "@radix-ui/react-focus-guards" "1.0.0" - "@radix-ui/react-focus-scope" "1.0.2" - "@radix-ui/react-id" "1.0.0" - "@radix-ui/react-portal" "1.0.2" - "@radix-ui/react-presence" "1.0.0" - "@radix-ui/react-primitive" "1.0.2" - "@radix-ui/react-slot" "1.0.1" - "@radix-ui/react-use-controllable-state" "1.0.0" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-context" "1.0.1" + "@radix-ui/react-dismissable-layer" "1.0.4" + "@radix-ui/react-focus-guards" "1.0.1" + "@radix-ui/react-focus-scope" "1.0.3" + "@radix-ui/react-id" "1.0.1" + "@radix-ui/react-portal" "1.0.3" + "@radix-ui/react-presence" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-slot" "1.0.2" + "@radix-ui/react-use-controllable-state" "1.0.1" aria-hidden "^1.1.1" react-remove-scroll "2.5.5" -"@radix-ui/react-dismissable-layer@1.0.3": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.3.tgz#63844d8e6bbcd010a513e7176d051c3c4044e09e" - integrity sha512-nXZOvFjOuHS1ovumntGV7NNoLaEp9JEvTht3MBjP44NSW5hUKj/8OnfN3+8WmB+CEhN44XaGhpHoSsUIEl5P7Q== +"@radix-ui/react-dismissable-layer@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz#883a48f5f938fa679427aa17fcba70c5494c6978" + integrity sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/primitive" "1.0.0" - "@radix-ui/react-compose-refs" "1.0.0" - 
"@radix-ui/react-primitive" "1.0.2" - "@radix-ui/react-use-callback-ref" "1.0.0" - "@radix-ui/react-use-escape-keydown" "1.0.2" + "@radix-ui/primitive" "1.0.1" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" + "@radix-ui/react-use-escape-keydown" "1.0.3" -"@radix-ui/react-focus-guards@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.0.tgz#339c1c69c41628c1a5e655f15f7020bf11aa01fa" - integrity sha512-UagjDk4ijOAnGu4WMUPj9ahi7/zJJqNZ9ZAiGPp7waUWJO0O1aWXi/udPphI0IUjvrhBsZJGSN66dR2dsueLWQ== +"@radix-ui/react-focus-guards@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.1.tgz#1ea7e32092216b946397866199d892f71f7f98ad" + integrity sha512-Rect2dWbQ8waGzhMavsIbmSVCgYxkXLxxR3ZvCX79JOglzdEy4JXMb98lq4hPxUbLr77nP0UOGf4rcMU+s1pUA== dependencies: "@babel/runtime" "^7.13.10" -"@radix-ui/react-focus-scope@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.2.tgz#5fe129cbdb5986d0a3ae16d14c473c243fe3bc79" - integrity sha512-spwXlNTfeIprt+kaEWE/qYuYT3ZAqJiAGjN/JgdvgVDTu8yc+HuX+WOWXrKliKnLnwck0F6JDkqIERncnih+4A== +"@radix-ui/react-focus-scope@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz#9c2e8d4ed1189a1d419ee61edd5c1828726472f9" + integrity sha512-upXdPfqI4islj2CslyfUBNlaJCPybbqRHAi1KER7Isel9Q2AtSJ0zRBZv8mWQiFXD2nyAJ4BhC3yXgZ6kMBSrQ== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-compose-refs" "1.0.0" - "@radix-ui/react-primitive" "1.0.2" - "@radix-ui/react-use-callback-ref" "1.0.0" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-primitive" "1.0.3" + "@radix-ui/react-use-callback-ref" "1.0.1" -"@radix-ui/react-id@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/react-id/-/react-id-1.0.0.tgz#8d43224910741870a45a8c9d092f25887bb6d11e" - integrity sha512-Q6iAB/U7Tq3NTolBBQbHTgclPmGWE3OlktGGqrClPozSw4vkQ1DfQAOtzgRPecKsMdJINE05iaoDUG8tRzCBjw== +"@radix-ui/react-id@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-id/-/react-id-1.0.1.tgz#73cdc181f650e4df24f0b6a5b7aa426b912c88c0" + integrity sha512-tI7sT/kqYp8p96yGWY1OAnLHrqDgzHefRBKQ2YAkBS5ja7QLcZ9Z/uY7bEjPUatf8RomoXM8/1sMj1IJaE5UzQ== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-use-layout-effect" "1.0.0" + "@radix-ui/react-use-layout-effect" "1.0.1" -"@radix-ui/react-portal@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-portal/-/react-portal-1.0.2.tgz#102370b1027a767a371cab0243be4bc664f72330" - integrity sha512-swu32idoCW7KA2VEiUZGBSu9nB6qwGdV6k6HYhUoOo3M1FFpD+VgLzUqtt3mwL1ssz7r2x8MggpLSQach2Xy/Q== +"@radix-ui/react-portal@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-portal/-/react-portal-1.0.3.tgz#ffb961244c8ed1b46f039e6c215a6c4d9989bda1" + integrity sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-primitive" "1.0.2" + "@radix-ui/react-primitive" "1.0.3" -"@radix-ui/react-presence@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/react-presence/-/react-presence-1.0.0.tgz#814fe46df11f9a468808a6010e3f3ca7e0b2e84a" - integrity 
sha512-A+6XEvN01NfVWiKu38ybawfHsBjWum42MRPnEuqPsBZ4eV7e/7K321B5VgYMPv3Xx5An6o1/l9ZuDBgmcmWK3w== +"@radix-ui/react-presence@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-presence/-/react-presence-1.0.1.tgz#491990ba913b8e2a5db1b06b203cb24b5cdef9ba" + integrity sha512-UXLW4UAbIY5ZjcvzjfRFo5gxva8QirC9hF7wRE4U5gz+TP0DbRk+//qyuAQ1McDxBt1xNMBTaciFGvEmJvAZCg== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-compose-refs" "1.0.0" - "@radix-ui/react-use-layout-effect" "1.0.0" + "@radix-ui/react-compose-refs" "1.0.1" + "@radix-ui/react-use-layout-effect" "1.0.1" -"@radix-ui/react-primitive@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-primitive/-/react-primitive-1.0.2.tgz#54e22f49ca59ba88d8143090276d50b93f8a7053" - integrity sha512-zY6G5Qq4R8diFPNwtyoLRZBxzu1Z+SXMlfYpChN7Dv8gvmx9X3qhDqiLWvKseKVJMuedFeU/Sa0Sy/Ia+t06Dw== +"@radix-ui/react-primitive@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-primitive/-/react-primitive-1.0.3.tgz#d49ea0f3f0b2fe3ab1cb5667eb03e8b843b914d0" + integrity sha512-yi58uVyoAcK/Nq1inRY56ZSjKypBNKTa/1mcL8qdl6oJeEaDbOldlzrGn7P6Q3Id5d+SYNGc5AJgc4vGhjs5+g== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-slot" "1.0.1" + "@radix-ui/react-slot" "1.0.2" -"@radix-ui/react-slot@1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.0.1.tgz#e7868c669c974d649070e9ecbec0b367ee0b4d81" - integrity sha512-avutXAFL1ehGvAXtPquu0YK5oz6ctS474iM3vNGQIkswrVhdrS52e3uoMQBzZhNRAIE0jBnUyXWNmSjGHhCFcw== +"@radix-ui/react-slot@1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.0.2.tgz#a9ff4423eade67f501ffb32ec22064bc9d3099ab" + integrity sha512-YeTpuq4deV+6DusvVUW4ivBgnkHwECUu0BiN43L5UCDFgdhsRUWAghhTF5MbvNTPzmiFOx90asDSUjWuCNapwg== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-compose-refs" "1.0.0" + "@radix-ui/react-compose-refs" "1.0.1" -"@radix-ui/react-use-callback-ref@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.0.tgz#9e7b8b6b4946fe3cbe8f748c82a2cce54e7b6a90" - integrity sha512-GZtyzoHz95Rhs6S63D2t/eqvdFCm7I+yHMLVQheKM7nBD8mbZIt+ct1jz4536MDnaOGKIxynJ8eHTkVGVVkoTg== +"@radix-ui/react-use-callback-ref@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.1.tgz#f4bb1f27f2023c984e6534317ebc411fc181107a" + integrity sha512-D94LjX4Sp0xJFVaoQOd3OO9k7tpBYNOXdVhkltUbGv2Qb9OXdrg/CpsjlZv7ia14Sylv398LswWBVVu5nqKzAQ== dependencies: "@babel/runtime" "^7.13.10" -"@radix-ui/react-use-controllable-state@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.0.tgz#a64deaafbbc52d5d407afaa22d493d687c538b7f" - integrity sha512-FohDoZvk3mEXh9AWAVyRTYR4Sq7/gavuofglmiXB2g1aKyboUD4YtgWxKj8O5n+Uak52gXQ4wKz5IFST4vtJHg== +"@radix-ui/react-use-controllable-state@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.1.tgz#ecd2ced34e6330caf89a82854aa2f77e07440286" + integrity sha512-Svl5GY5FQeN758fWKrjM6Qb7asvXeiZltlT4U2gVfl8Gx5UAv2sMR0LWo8yhsIZh2oQ0eFdZ59aoOOMV7b47VA== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-use-callback-ref" "1.0.0" + "@radix-ui/react-use-callback-ref" "1.0.1" -"@radix-ui/react-use-escape-keydown@1.0.2": - version 
"1.0.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.2.tgz#09ab6455ab240b4f0a61faf06d4e5132c4d639f6" - integrity sha512-DXGim3x74WgUv+iMNCF+cAo8xUHHeqvjx8zs7trKf+FkQKPQXLk2sX7Gx1ysH7Q76xCpZuxIJE7HLPxRE+Q+GA== +"@radix-ui/react-use-escape-keydown@1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.3.tgz#217b840c250541609c66f67ed7bab2b733620755" + integrity sha512-vyL82j40hcFicA+M4Ex7hVkB9vHgSse1ZWomAqV2Je3RleKGO5iM8KMOEtfoSB0PnIelMd2lATjTGMYqN5ylTg== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-use-callback-ref" "1.0.0" + "@radix-ui/react-use-callback-ref" "1.0.1" -"@radix-ui/react-use-layout-effect@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.0.tgz#2fc19e97223a81de64cd3ba1dc42ceffd82374dc" - integrity sha512-6Tpkq+R6LOlmQb1R5NNETLG0B4YP0wc+klfXafpUCj6JGyaUc8il7/kUZ7m59rGbXGczE9Bs+iz2qloqsZBduQ== +"@radix-ui/react-use-layout-effect@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.1.tgz#be8c7bc809b0c8934acf6657b577daf948a75399" + integrity sha512-v/5RegiJWYdoCvMnITBkNNx6bCj20fiaJnWtRkU18yITptraXjffz5Qbn05uOiQnOvi+dbkznkoaMltz1GnszQ== dependencies: "@babel/runtime" "^7.13.10" "@radix-ui/react-visually-hidden@^1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.0.2.tgz#29b117a59ef09a984bdad12cb98d81e8350be450" - integrity sha512-qirnJxtYn73HEk1rXL12/mXnu2rwsNHDID10th2JGtdK25T9wX+mxRmGt7iPSahw512GbZOc0syZX1nLQGoEOg== + version "1.0.3" + resolved "https://registry.yarnpkg.com/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.0.3.tgz#51aed9dd0fe5abcad7dee2a234ad36106a6984ac" + integrity sha512-D4w41yN5YRKtu464TLnByKzMDG/JlMPHtfZgQAu9v6mNakUqGUI9vUrfQKz8NK41VMm/xbZbh76NUTVtIYqOMA== dependencies: "@babel/runtime" "^7.13.10" - "@radix-ui/react-primitive" "1.0.2" + "@radix-ui/react-primitive" "1.0.3" + +"@rc-component/color-picker@~1.4.0": + version "1.4.1" + resolved "https://registry.yarnpkg.com/@rc-component/color-picker/-/color-picker-1.4.1.tgz#dcab0b660e9c4ed63a7582db68ed4a77c862cb93" + integrity sha512-vh5EWqnsayZa/JwUznqDaPJz39jznx/YDbyBuVJntv735tKXKwEUZZb2jYEldOg+NKWZwtALjGMrNeGBmqFoEw== + dependencies: + "@babel/runtime" "^7.10.1" + "@ctrl/tinycolor" "^3.6.0" + classnames "^2.2.6" + rc-util "^5.30.0" + +"@rc-component/context@^1.3.0": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@rc-component/context/-/context-1.4.0.tgz#dc6fb021d6773546af8f016ae4ce9aea088395e8" + integrity sha512-kFcNxg9oLRMoL3qki0OMxK+7g5mypjgaaJp/pkOis/6rVxma9nJBF/8kCIuTYHUQNr0ii7MxqE33wirPZLJQ2w== + dependencies: + "@babel/runtime" "^7.10.1" + rc-util "^5.27.0" "@rc-component/mini-decimal@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@rc-component/mini-decimal/-/mini-decimal-1.0.1.tgz#e5dbc20a6a5b0e234d279bc71ce730ab865d3910" - integrity sha512-9N8nRk0oKj1qJzANKl+n9eNSMUGsZtjwNuDCiZ/KA+dt1fE3zq5x2XxclRcAbOIXnZcJ53ozP2Pa60gyELXagA== + version "1.1.0" + resolved "https://registry.yarnpkg.com/@rc-component/mini-decimal/-/mini-decimal-1.1.0.tgz#7b7a362b14a0a54cb5bc6fd2b82731f29f11d9b0" + integrity sha512-jS4E7T9Li2GuYwI6PyiVXmxTiM6b07rlD9Ge8uGZSCz3WlzcG5ZK7g5bbuKNeZ9pgUuPK/5guV781ujdVpm4HQ== dependencies: "@babel/runtime" "^7.18.0" -"@rc-component/portal@^1.0.0-6", 
"@rc-component/portal@^1.0.0-8", "@rc-component/portal@^1.0.0-9", "@rc-component/portal@^1.0.2": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@rc-component/portal/-/portal-1.0.3.tgz#3aa2c229a7a20ac2412d864e8977e6377973416e" - integrity sha512-rG9j7OMiI9eLFLF6G0B4OcfLac9W8Z7Vjeizbjt/A6R+zzw7vhHbJ4GIkrDpUqXDvFdEEzdxfICpb8/noLwG+w== +"@rc-component/mutate-observer@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@rc-component/mutate-observer/-/mutate-observer-1.1.0.tgz#ee53cc88b78aade3cd0653609215a44779386fd8" + integrity sha512-QjrOsDXQusNwGZPf4/qRQasg7UFEj06XiCJ8iuiq/Io7CrHrgVi6Uuetw60WAMG1799v+aM8kyc+1L/GBbHSlw== dependencies: "@babel/runtime" "^7.18.0" classnames "^2.3.2" rc-util "^5.24.4" -"@rc-component/tour@~1.0.1-2": - version "1.0.4" - resolved "https://registry.yarnpkg.com/@rc-component/tour/-/tour-1.0.4.tgz#551cdffbd9de0bfe82600f25f73452631cc29a37" - integrity sha512-FwAh9twryS6Ava2mUqwJtbhIt0ObIZIgQOJK+XTl+pQvsmXtUGtbOif3/4FeVmncy7FEGH7mnlIjS4OLGkQC9A== +"@rc-component/portal@^1.0.0-8", "@rc-component/portal@^1.0.0-9", "@rc-component/portal@^1.0.2", "@rc-component/portal@^1.1.0", "@rc-component/portal@^1.1.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@rc-component/portal/-/portal-1.1.2.tgz#55db1e51d784e034442e9700536faaa6ab63fc71" + integrity sha512-6f813C0IsasTZms08kfA8kPAGxbbkYToa8ALaiDIGGECU4i9hj8Plgbx0sNJDrey3EtHO30hmdaxtT0138xZcg== + dependencies: + "@babel/runtime" "^7.18.0" + classnames "^2.3.2" + rc-util "^5.24.4" + +"@rc-component/tour@~1.8.1": + version "1.8.1" + resolved "https://registry.yarnpkg.com/@rc-component/tour/-/tour-1.8.1.tgz#a820714b66cb17f317ebd21ac1b45733d2b99183" + integrity sha512-CsrQnfKgNArxx2j1RNHVLZgVA+rLrEj06lIsl4KSynMqADsqz8eKvVkr0F3p9PA10948M6WEEZt5a/FGAbGR2A== dependencies: "@babel/runtime" "^7.18.0" "@rc-component/portal" "^1.0.0-9" + "@rc-component/trigger" "^1.3.6" classnames "^2.3.2" - rc-trigger "^5.3.4" rc-util "^5.24.4" +"@rc-component/trigger@^1.0.4", "@rc-component/trigger@^1.15.0", "@rc-component/trigger@^1.3.6", "@rc-component/trigger@^1.5.0", "@rc-component/trigger@^1.6.2", "@rc-component/trigger@^1.7.0": + version "1.15.6" + resolved "https://registry.yarnpkg.com/@rc-component/trigger/-/trigger-1.15.6.tgz#ccb71f16229e832e15b3869817cbe24f5e59b54c" + integrity sha512-Tl19KaGsShf4yzqxumsXVT4c7j0l20Dxe5hgP5S0HmxyhCg3oKen28ntGavRCIPW7cl7wgsGotntqcIokgDHzg== + dependencies: + "@babel/runtime" "^7.18.3" + "@rc-component/portal" "^1.1.0" + classnames "^2.3.2" + rc-align "^4.0.0" + rc-motion "^2.0.0" + rc-resize-observer "^1.3.1" + rc-util "^5.33.0" + "@sideway/address@^4.1.3": version "4.1.4" resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0" @@ -2554,7 +2470,7 @@ dependencies: "@hapi/hoek" "^9.0.0" -"@sideway/formula@^3.0.0": +"@sideway/formula@^3.0.1": version "3.0.1" resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f" integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== @@ -2564,6 +2480,11 @@ resolved "https://registry.yarnpkg.com/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df" integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== +"@sinclair/typebox@^0.27.8": + version "0.27.8" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + integrity 
sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== + "@sindresorhus/is@^0.14.0": version "0.14.0" resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" @@ -2584,14 +2505,14 @@ integrity sha512-9PYGcXrAxitycIjRmZB+Q0JaN07GZIWaTBIGQzfaZv+qr1n8X1XUEJ5rZ/vx6OVD9RRYlrNnXWExQXcmZeD/BQ== "@svgr/babel-plugin-remove-jsx-attribute@*": - version "6.5.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.5.0.tgz#652bfd4ed0a0699843585cda96faeb09d6e1306e" - integrity sha512-8zYdkym7qNyfXpWvu4yq46k41pyNM9SOstoWhKlm+IfdCE1DdnRKeMUPsWIEO/DEkaWxJ8T9esNdG3QwQ93jBA== + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz#69177f7937233caca3a1afb051906698f2f59186" + integrity sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA== "@svgr/babel-plugin-remove-jsx-empty-expression@*": - version "6.5.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.5.0.tgz#4b78994ab7d39032c729903fc2dd5c0fa4565cb8" - integrity sha512-NFdxMq3xA42Kb1UbzCVxplUc0iqSyM9X8kopImvFnB+uSDdzIHOdbs1op8ofAvVRtbg4oZiyRl3fTYeKcOe9Iw== + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz#c2c48104cfd7dcd557f373b70a56e9e3bdae1d44" + integrity sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA== "@svgr/babel-plugin-replace-jsx-attribute-value@^6.5.1": version "6.5.1" @@ -2696,6 +2617,26 @@ resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== +"@tsconfig/node10@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== + "@types/body-parser@*": version "1.19.2" resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" @@ -2712,9 +2653,9 @@ "@types/node" "*" "@types/connect-history-api-fallback@^1.3.5": - version "1.3.5" - resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" - integrity 
sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + version "1.5.0" + resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.0.tgz#9fd20b3974bdc2bcd4ac6567e2e0f6885cb2cf41" + integrity sha512-4x5FkPpLipqwthjPsF7ZRbOv3uoLUFkTA9G9v583qi4pACvq0uTELrB8OLUzPWUI4IJIyvM85vzkV1nyiI2Lig== dependencies: "@types/express-serve-static-core" "*" "@types/node" "*" @@ -2727,9 +2668,9 @@ "@types/node" "*" "@types/debug@^4.0.0": - version "4.1.7" - resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82" - integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg== + version "4.1.8" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.8.tgz#cef723a5d0a90990313faec2d1e22aee5eecb317" + integrity sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ== dependencies: "@types/ms" "*" @@ -2742,43 +2683,44 @@ "@types/estree" "*" "@types/eslint@*": - version "7.28.1" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.28.1.tgz#50b07747f1f84c2ba8cd394cf0fe0ba07afce320" - integrity sha512-XhZKznR3i/W5dXqUhgU9fFdJekufbeBd5DALmkuXoeFcjbQcPk+2cL+WLHf6Q81HWAnM2vrslIHpGVyCAviRwg== + version "8.44.2" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.2.tgz#0d21c505f98a89b8dd4d37fa162b09da6089199a" + integrity sha512-sdPRb9K6iL5XZOmBubg8yiFp5yS/JdUDQsq5e6h95km91MCYMuvp7mh1fjPEYUhvHepKpZOjnEaMBR4PxjWDzg== dependencies: "@types/estree" "*" "@types/json-schema" "*" -"@types/estree@*", "@types/estree@^0.0.51": - version "0.0.51" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" - integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== +"@types/estree@*", "@types/estree@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" + integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== -"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": - version "4.17.30" - resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.30.tgz#0f2f99617fa8f9696170c46152ccf7500b34ac04" - integrity sha512-gstzbTWro2/nFed1WXtf+TtrpwxH7Ggs4RLYTLbeVgIkUQOI3WG/JKjgeOU1zXDvezllupjrf8OPIdvTbIaVOQ== +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": + version "4.17.36" + resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.36.tgz#baa9022119bdc05a4adfe740ffc97b5f9360e545" + integrity sha512-zbivROJ0ZqLAtMzgzIUC4oNqDG9iF0lSsAqpOD9kbs5xcIM3dTiyuHvBc7R8MtWBp3AAWGaovJa+wzWPjLYW7Q== dependencies: "@types/node" "*" "@types/qs" "*" "@types/range-parser" "*" + "@types/send" "*" "@types/express@*", "@types/express@^4.17.13": - version "4.17.13" - resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.13.tgz#a76e2995728999bab51a33fabce1d705a3709034" - integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA== + version "4.17.17" + resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.17.tgz#01d5437f6ef9cfa8668e616e13c2f2ac9a491ae4" + integrity 
sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q== dependencies: "@types/body-parser" "*" - "@types/express-serve-static-core" "^4.17.18" + "@types/express-serve-static-core" "^4.17.33" "@types/qs" "*" "@types/serve-static" "*" "@types/hast@^2.0.0": - version "2.3.4" - resolved "https://registry.yarnpkg.com/@types/hast/-/hast-2.3.4.tgz#8aa5ef92c117d20d974a82bdfb6a648b08c0bafc" - integrity sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g== + version "2.3.5" + resolved "https://registry.yarnpkg.com/@types/hast/-/hast-2.3.5.tgz#08caac88b44d0fdd04dc17a19142355f43bd8a7a" + integrity sha512-SvQi0L/lNpThgPoleH53cdjB3y9zpLlVjRbqB3rH8hx1jiRSBGAhyjV3H+URFjNVRqt2EdYNrbZE5IsGlNfpRg== dependencies: - "@types/unist" "*" + "@types/unist" "^2" "@types/history@^4.7.11": version "4.7.11" @@ -2786,21 +2728,45 @@ integrity sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA== "@types/html-minifier-terser@^6.0.0": - version "6.0.0" - resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.0.0.tgz#563c1c6c132cd204e71512f9c0b394ff90d3fae7" - integrity sha512-NZwaaynfs1oIoLAV1vg18e7QMVDvw+6SQrdJc8w3BwUaoroVSf6EBj/Sk4PBWGxsq0dzhA2drbsuMC1/6C6KgQ== + version "6.1.0" + resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-errors@*": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" + integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== "@types/http-proxy@^1.17.8": - version "1.17.9" - resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" - integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== + version "1.17.11" + resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.11.tgz#0ca21949a5588d55ac2b659b69035c84bd5da293" + integrity sha512-HC8G7c1WmaF2ekqpnFq626xd3Zz0uvaqFmBJNRZCGEZCXkvSdJoNFn/8Ygbd9fKNQj8UzLdCETaI0UWPAjK7IA== dependencies: "@types/node" "*" +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + "@types/json-schema@*", "@types/json-schema@^7.0.4", 
"@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": - version "7.0.11" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" - integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + version "7.0.12" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" + integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== "@types/marked@^2.0.2": version "2.0.5" @@ -2808,23 +2774,33 @@ integrity sha512-shRZ7XnYFD/8n8zSjKvFdto1QNSf4tONZIlNEZGrJe8GsOE8DL/hG1Hbl8gZlfLnjS7+f5tZGIaTgfpyW38h4w== "@types/mdast@^3.0.0": - version "3.0.10" - resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.10.tgz#4724244a82a4598884cbbe9bcfd73dff927ee8af" - integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA== + version "3.0.12" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.12.tgz#beeb511b977c875a5b0cc92eab6fcac2f0895514" + integrity sha512-DT+iNIRNX884cx0/Q1ja7NyUPpZuv0KPyL5rGNxm1WC1OtHstl7n4Jb7nk+xacNShQMbczJjt8uFzznpp6kYBg== dependencies: - "@types/unist" "*" + "@types/unist" "^2" "@types/mime@*": version "3.0.1" resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== +"@types/mime@^1": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" + integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== + "@types/ms@*": version "0.7.31" resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197" integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA== -"@types/node@*", "@types/node@^17.0.5": +"@types/node@*": + version "20.5.7" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.5.7.tgz#4b8ecac87fbefbc92f431d09c30e176fc0a7c377" + integrity sha512-dP7f3LdZIysZnmvP3ANJYTSwg+wLLl8p7RqniVlV7j+oXSXAbt9h0WIBFmJy5inWZoX9wZN6eXx+YXd9Rh3RBA== + +"@types/node@^17.0.5": version "17.0.45" resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.45.tgz#2c0fafd78705e7a18b7906b5201a522719dc5190" integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== @@ -2845,9 +2821,9 @@ integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== "@types/qs@*": - version "6.9.7" - resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" - integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + version "6.9.8" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.8.tgz#f2a7de3c107b89b441e071d5472e6b726b4adf45" + integrity sha512-u95svzDlTysU5xecFNTgfFG5RUWu1A9P0VzgpcIiGZA9iraHOdSzcxMxQ55DyeRaGCSxQi7LxXDI4rzq/MYfdg== "@types/range-parser@*": version "1.2.4" @@ -2855,13 +2831,13 @@ integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== "@types/react-router-config@*", "@types/react-router-config@^5.0.6": - version "5.0.6" - resolved 
"https://registry.yarnpkg.com/@types/react-router-config/-/react-router-config-5.0.6.tgz#87c5c57e72d241db900d9734512c50ccec062451" - integrity sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg== + version "5.0.7" + resolved "https://registry.yarnpkg.com/@types/react-router-config/-/react-router-config-5.0.7.tgz#36207a3fe08b271abee62b26993ee932d13cbb02" + integrity sha512-pFFVXUIydHlcJP6wJm7sDii5mD/bCmmAY0wQzq+M+uX7bqS95AQqHZWP1iNMKrWVQSuHIzj5qi9BvrtLX2/T4w== dependencies: "@types/history" "^4.7.11" "@types/react" "*" - "@types/react-router" "*" + "@types/react-router" "^5.1.0" "@types/react-router-dom@*": version "5.3.3" @@ -2872,7 +2848,7 @@ "@types/react" "*" "@types/react-router" "*" -"@types/react-router@*": +"@types/react-router@*", "@types/react-router@^5.1.0": version "5.1.20" resolved "https://registry.yarnpkg.com/@types/react-router/-/react-router-5.1.20.tgz#88eccaa122a82405ef3efbcaaa5dcdd9f021387c" integrity sha512-jGjmu/ZqS7FjSH6owMcD5qpq19+1RS9DeVRqfl1FeBMxTDQAGwlMWOcs52NDoXaNKyG3d1cYQFMs9rCrb88o9Q== @@ -2881,9 +2857,9 @@ "@types/react" "*" "@types/react@*": - version "18.0.18" - resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.18.tgz#9f16f33d57bc5d9dca848d12c3572110ff9429ac" - integrity sha512-6hI08umYs6NaiHFEEGioXnxJ+oEhY3eRz8VCUaudZmGdtvPviCJB8mgaMxaDWAdPSYd4eFavrPk2QIolwbLYrg== + version "18.2.21" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.21.tgz#774c37fd01b522d0b91aed04811b58e4e0514ed9" + integrity sha512-neFKG/sBAwGxHgXiIxnbm3/AAVQ/cMRS93hvBpg8xYRbeQSPVABp9U2bRnPf0iI4+Ucdv3plSxKK+3CW2ENJxA== dependencies: "@types/prop-types" "*" "@types/scheduler" "*" @@ -2902,9 +2878,17 @@ "@types/node" "*" "@types/scheduler@*": - version "0.16.2" - resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" - integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + version "0.16.3" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.3.tgz#cef09e3ec9af1d63d2a6cc5b383a737e24e6dcf5" + integrity sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ== + +"@types/send@*": + version "0.17.1" + resolved "https://registry.yarnpkg.com/@types/send/-/send-0.17.1.tgz#ed4932b8a2a805f1fe362a70f4e62d0ac994e301" + integrity sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q== + dependencies: + "@types/mime" "^1" + "@types/node" "*" "@types/serve-index@^1.9.1": version "1.9.1" @@ -2914,10 +2898,11 @@ "@types/express" "*" "@types/serve-static@*", "@types/serve-static@^1.13.10": - version "1.15.0" - resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.0.tgz#c7930ff61afb334e121a9da780aac0d9b8f34155" - integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== + version "1.15.2" + resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.2.tgz#3e5419ecd1e40e7405d34093f10befb43f63381a" + integrity sha512-J2LqtvFYCzaj8pVYKw8klQXrLLk7TBZmQ4ShlcdkELFKGwGMfevMLneMMRkMgZxotOD9wg497LpC7O8PcvAmfw== dependencies: + "@types/http-errors" "*" "@types/mime" "*" "@types/node" "*" @@ -2928,10 +2913,10 @@ dependencies: "@types/node" "*" -"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": - version "2.0.6" - resolved 
"https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" - integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== +"@types/unist@^2", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": + version "2.0.8" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.8.tgz#bb197b9639aa1a04cf464a617fe800cccd92ad5c" + integrity sha512-d0XxK3YTObnWVp6rZuev3c49+j4Lo8g4L1ZRm9z5L0xpoZycUPshHgczK5gsUMaZOstjVYYi09p5gYvUtfChYw== "@types/websocket@1.0.2": version "1.0.2" @@ -2940,132 +2925,144 @@ dependencies: "@types/node" "*" -"@types/ws@^8.5.1": - version "8.5.3" - resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" - integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== +"@types/ws@^8.5.5": + version "8.5.5" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.5.tgz#af587964aa06682702ee6dcbc7be41a80e4b28eb" + integrity sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg== dependencies: "@types/node" "*" -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^17.0.8": + version "17.0.24" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" + integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== + dependencies: + "@types/yargs-parser" "*" + +"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" + integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== dependencies: - "@webassemblyjs/helper-numbers" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-numbers" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== +"@webassemblyjs/floating-point-hex-parser@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity 
sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== +"@webassemblyjs/helper-api-error@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== +"@webassemblyjs/helper-buffer@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" + integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== +"@webassemblyjs/helper-numbers@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/floating-point-hex-parser" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== +"@webassemblyjs/helper-wasm-bytecode@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== +"@webassemblyjs/helper-wasm-section@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" + integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/wasm-gen" "1.11.6" -"@webassemblyjs/ieee754@1.11.1": - version 
"1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== +"@webassemblyjs/ieee754@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== +"@webassemblyjs/leb128@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" "1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" 
"1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" +"@webassemblyjs/utf8@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== + +"@webassemblyjs/wasm-edit@^1.11.5": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" + integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== + dependencies: + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.11.6" + "@webassemblyjs/wasm-gen" "1.11.6" + "@webassemblyjs/wasm-opt" "1.11.6" + "@webassemblyjs/wasm-parser" "1.11.6" + "@webassemblyjs/wast-printer" "1.11.6" + +"@webassemblyjs/wasm-gen@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" + integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== + dependencies: + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wasm-opt@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" + integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== + dependencies: + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/wasm-gen" "1.11.6" + "@webassemblyjs/wasm-parser" "1.11.6" + +"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" + integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== + dependencies: + "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wast-printer@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" + integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== + dependencies: + "@webassemblyjs/ast" "1.11.6" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": @@ -3093,25 +3090,25 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: mime-types "~2.1.34" negotiator "0.6.3" -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved 
"https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== +acorn-import-assertions@^1.9.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" + integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== -acorn-walk@^8.0.0: +acorn-walk@^8.0.0, acorn-walk@^8.1.1: version "8.2.0" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== -acorn@^8.0.4, acorn@^8.5.0, acorn@^8.7.1: - version "8.8.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" - integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== +acorn@^8.0.4, acorn@^8.4.1, acorn@^8.7.1, acorn@^8.8.2: + version "8.10.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.10.0.tgz#8be5b3907a67221a81ab23c7889c4c5526b62ec5" + integrity sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw== address@^1.0.1, address@^1.1.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/address/-/address-1.2.0.tgz#d352a62c92fee90f89a693eccd2a8b2139ab02d9" - integrity sha512-tNEZYz5G/zYunxFm7sfhAxkXEuLj3K6BKwv6ZURlsF6yiUQ65z0Q2wZW9L5cPUl9ocofGvXOdFYbFHp0+6MOig== + version "1.2.2" + resolved "https://registry.yarnpkg.com/address/-/address-1.2.2.tgz#2b5248dac5485a6390532c6a517fda2e3faac89e" + integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA== aggregate-error@^3.0.0: version "3.1.0" @@ -3133,7 +3130,7 @@ ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv-keywords@^5.0.0: +ajv-keywords@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== @@ -3150,10 +3147,10 @@ ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.0, ajv@^8.8.0: - version "8.11.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" - integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== +ajv@^8.0.0, ajv@^8.9.0: + version "8.12.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" + integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== dependencies: fast-deep-equal "^3.1.1" json-schema-traverse "^1.0.0" @@ -3161,31 +3158,31 @@ ajv@^8.0.0, ajv@^8.8.0: uri-js "^4.2.2" algoliasearch-helper@^3.10.0: - version "3.11.1" - resolved "https://registry.yarnpkg.com/algoliasearch-helper/-/algoliasearch-helper-3.11.1.tgz#d83ab7f1a2a374440686ef7a144b3c288b01188a" - integrity 
sha512-mvsPN3eK4E0bZG0/WlWJjeqe/bUD2KOEVOl0GyL/TGXn6wcpZU8NOuztGHCUKXkyg5gq6YzUakVTmnmSSO5Yiw== + version "3.14.0" + resolved "https://registry.yarnpkg.com/algoliasearch-helper/-/algoliasearch-helper-3.14.0.tgz#2409c2591952719ab6fba1de77b3bbe5094ab85e" + integrity sha512-gXDXzsSS0YANn5dHr71CUXOo84cN4azhHKUbg71vAWnH+1JBiR4jf7to3t3JHXknXkbV0F7f055vUSBKrltHLQ== dependencies: "@algolia/events" "^4.0.1" -algoliasearch@^4.0.0, algoliasearch@^4.13.1: - version "4.14.3" - resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-4.14.3.tgz#f02a77a4db17de2f676018938847494b692035e7" - integrity sha512-GZTEuxzfWbP/vr7ZJfGzIl8fOsoxN916Z6FY2Egc9q2TmZ6hvq5KfAxY89pPW01oW/2HDEKA8d30f9iAH9eXYg== - dependencies: - "@algolia/cache-browser-local-storage" "4.14.3" - "@algolia/cache-common" "4.14.3" - "@algolia/cache-in-memory" "4.14.3" - "@algolia/client-account" "4.14.3" - "@algolia/client-analytics" "4.14.3" - "@algolia/client-common" "4.14.3" - "@algolia/client-personalization" "4.14.3" - "@algolia/client-search" "4.14.3" - "@algolia/logger-common" "4.14.3" - "@algolia/logger-console" "4.14.3" - "@algolia/requester-browser-xhr" "4.14.3" - "@algolia/requester-common" "4.14.3" - "@algolia/requester-node-http" "4.14.3" - "@algolia/transporter" "4.14.3" +algoliasearch@^4.13.1, algoliasearch@^4.19.1: + version "4.19.1" + resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-4.19.1.tgz#18111fb422eaf841737adb92d5ab12133d244218" + integrity sha512-IJF5b93b2MgAzcE/tuzW0yOPnuUyRgGAtaPv5UUywXM8kzqfdwZTO4sPJBzoGz1eOy6H9uEchsJsBFTELZSu+g== + dependencies: + "@algolia/cache-browser-local-storage" "4.19.1" + "@algolia/cache-common" "4.19.1" + "@algolia/cache-in-memory" "4.19.1" + "@algolia/client-account" "4.19.1" + "@algolia/client-analytics" "4.19.1" + "@algolia/client-common" "4.19.1" + "@algolia/client-personalization" "4.19.1" + "@algolia/client-search" "4.19.1" + "@algolia/logger-common" "4.19.1" + "@algolia/logger-console" "4.19.1" + "@algolia/requester-browser-xhr" "4.19.1" + "@algolia/requester-common" "4.19.1" + "@algolia/requester-node-http" "4.19.1" + "@algolia/transporter" "4.19.1" ansi-align@^3.0.0, ansi-align@^3.0.1: version "3.0.1" @@ -3224,66 +3221,68 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0: color-convert "^2.0.1" ansi-styles@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.1.0.tgz#87313c102b8118abd57371afab34618bf7350ed3" - integrity sha512-VbqNsoz55SYGczauuup0MFUyXNQviSpFTj1RQtFzmQLk18qbVSpTFFGMT293rmDaQuKCT6InmbuEyUne4mTuxQ== + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== antd@^5.0.7: - version "5.0.7" - resolved "https://registry.yarnpkg.com/antd/-/antd-5.0.7.tgz#0c182ad33c9de81b204f4fa430d75480fad7e0d6" - integrity sha512-A/HO/VCS6nHMkpnjfPSxXoet1MBu6N003osaGbSI3mnOvRj0V/9jEOXJ2slj+2YNhACFYJfVyXeEamwQx2Njcw== - dependencies: - "@ant-design/colors" "^6.0.0" - "@ant-design/cssinjs" "^1.0.0" - "@ant-design/icons" "^4.7.0" - "@ant-design/react-slick" "~0.29.1" + version "5.8.5" + resolved "https://registry.yarnpkg.com/antd/-/antd-5.8.5.tgz#36574f6d4e90c683c9a77f7797416ac180035055" + integrity sha512-CaCl0BquElbSmilzKunyf22hVhgErKjzh2twQ+ZuFUSZfTOibi6/bwXp93lnT/VGgnc3Z3f2OizybzXWR7/tQg== + dependencies: + "@ant-design/colors" "^7.0.0" + "@ant-design/cssinjs" "^1.16.0" + "@ant-design/icons" "^5.2.2" + "@ant-design/react-slick" "~1.0.0" 
"@babel/runtime" "^7.18.3" - "@ctrl/tinycolor" "^3.4.0" - "@rc-component/tour" "~1.0.1-2" + "@ctrl/tinycolor" "^3.6.0" + "@rc-component/color-picker" "~1.4.0" + "@rc-component/mutate-observer" "^1.0.0" + "@rc-component/tour" "~1.8.1" + "@rc-component/trigger" "^1.15.0" classnames "^2.2.6" copy-to-clipboard "^3.2.0" dayjs "^1.11.1" - lodash "^4.17.21" - rc-cascader "~3.7.0" - rc-checkbox "~2.3.0" - rc-collapse "~3.4.2" - rc-dialog "~9.0.2" - rc-drawer "~6.0.0" - rc-dropdown "~4.0.0" - rc-field-form "~1.27.0" - rc-image "~5.12.0" - rc-input "~0.1.4" - rc-input-number "~7.4.0" - rc-mentions "~1.13.1" - rc-menu "~9.8.0" - rc-motion "^2.6.1" - rc-notification "~5.0.0-alpha.9" - rc-pagination "~3.2.0" - rc-picker "~3.1.1" + qrcode.react "^3.1.0" + rc-cascader "~3.14.0" + rc-checkbox "~3.1.0" + rc-collapse "~3.7.0" + rc-dialog "~9.1.0" + rc-drawer "~6.2.0" + rc-dropdown "~4.1.0" + rc-field-form "~1.36.0" + rc-image "~7.1.0" + rc-input "~1.1.0" + rc-input-number "~8.0.2" + rc-mentions "~2.5.0" + rc-menu "~9.10.0" + rc-motion "^2.7.3" + rc-notification "~5.0.4" + rc-pagination "~3.6.0" + rc-picker "~3.13.0" rc-progress "~3.4.1" - rc-rate "~2.9.0" + rc-rate "~2.12.0" rc-resize-observer "^1.2.0" - rc-segmented "~2.1.0" - rc-select "~14.1.13" - rc-slider "~10.0.0" - rc-steps "~6.0.0-alpha.2" - rc-switch "~4.0.0" - rc-table "~7.26.0" - rc-tabs "~12.4.2" - rc-textarea "~0.4.5" - rc-tooltip "~5.2.0" - rc-tree "~5.7.0" - rc-tree-select "~5.5.4" - rc-trigger "^5.2.10" + rc-segmented "~2.2.0" + rc-select "~14.7.1" + rc-slider "~10.1.0" + rc-steps "~6.0.1" + rc-switch "~4.1.0" + rc-table "~7.32.1" + rc-tabs "~12.9.0" + rc-textarea "~1.3.3" + rc-tooltip "~6.0.0" + rc-tree "~5.7.6" + rc-tree-select "~5.11.0" rc-upload "~4.3.0" - rc-util "^5.25.2" + rc-util "^5.37.0" scroll-into-view-if-needed "^3.0.3" - shallowequal "^1.1.0" + throttle-debounce "^5.0.0" anymatch@~3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" @@ -3358,12 +3357,12 @@ at-least-node@^1.0.0: integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== autoprefixer@^10.4.12, autoprefixer@^10.4.7: - version "10.4.13" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.13.tgz#b5136b59930209a321e9fa3dca2e7c4d223e83a8" - integrity sha512-49vKpMqcZYsJjwotvt4+h/BCjJVnhGwcLpDt5xkcaOG3eLrG/HUYLagrihYsQ+qrIBgIzX1Rw7a6L8I/ZA1Atg== + version "10.4.15" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.15.tgz#a1230f4aeb3636b89120b34a1f513e2f6834d530" + integrity sha512-KCuPB8ZCIqFdA4HwKXsvz7j6gvSDNhDP7WnUjBleRkKjPdvCmHFuQ77ocavI8FT6NdvlBnE2UFr2H4Mycn8Vew== dependencies: - browserslist "^4.21.4" - caniuse-lite "^1.0.30001426" + browserslist "^4.21.10" + caniuse-lite "^1.0.30001520" fraction.js "^4.2.0" normalize-range "^0.1.2" picocolors "^1.0.0" @@ -3377,9 +3376,9 @@ axios@^0.25.0: follow-redirects "^1.14.7" babel-loader@^8.2.5: - version "8.2.5" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" - integrity 
sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== + version "8.3.0" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.3.0.tgz#124936e841ba4fe8176786d6ff28add1f134d6a8" + integrity sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== dependencies: find-cache-dir "^3.3.1" loader-utils "^2.0.0" @@ -3408,37 +3407,29 @@ babel-plugin-extract-import-names@1.6.22: dependencies: "@babel/helper-plugin-utils" "7.10.4" -babel-plugin-polyfill-corejs2@^0.3.2, babel-plugin-polyfill-corejs2@^0.3.3: - version "0.3.3" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" - integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== - dependencies: - "@babel/compat-data" "^7.17.7" - "@babel/helper-define-polyfill-provider" "^0.3.3" - semver "^6.1.1" - -babel-plugin-polyfill-corejs3@^0.5.3: - version "0.5.3" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.3.tgz#d7e09c9a899079d71a8b670c6181af56ec19c5c7" - integrity sha512-zKsXDh0XjnrUEW0mxIHLfjBfnXSMr5Q/goMe/fxpQnLm07mcOZiIZHBNWCMx60HmdvjxfXcalac0tfFg0wqxyw== +babel-plugin-polyfill-corejs2@^0.4.5: + version "0.4.5" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.5.tgz#8097b4cb4af5b64a1d11332b6fb72ef5e64a054c" + integrity sha512-19hwUH5FKl49JEsvyTcoHakh6BE0wgXLLptIyKZ3PijHc/Ci521wygORCUCCred+E/twuqRyAkE02BAWPmsHOg== dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.2" - core-js-compat "^3.21.0" + "@babel/compat-data" "^7.22.6" + "@babel/helper-define-polyfill-provider" "^0.4.2" + semver "^6.3.1" -babel-plugin-polyfill-corejs3@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz#56ad88237137eade485a71b52f72dbed57c6230a" - integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== +babel-plugin-polyfill-corejs3@^0.8.3: + version "0.8.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.3.tgz#b4f719d0ad9bb8e0c23e3e630c0c8ec6dd7a1c52" + integrity sha512-z41XaniZL26WLrvjy7soabMXrfPWARN25PZoriDEiLMxAp50AUW3t35BGQUMg5xK3UrpVTtagIDklxYa+MhiNA== dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.3" - core-js-compat "^3.25.1" + "@babel/helper-define-polyfill-provider" "^0.4.2" + core-js-compat "^3.31.0" -babel-plugin-polyfill-regenerator@^0.4.0, babel-plugin-polyfill-regenerator@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" - integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== +babel-plugin-polyfill-regenerator@^0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.2.tgz#80d0f3e1098c080c8b5a65f41e9427af692dc326" + integrity sha512-tAlOptU0Xj34V1Y2PNTL4Y0FOJMDB6bZmoW39FeCQIhigGLkqu3Fj6uiXpxIf6Ij274ENdYx64y6Au+ZKlb1IA== dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.3" + "@babel/helper-define-polyfill-provider" "^0.4.2" backo2@^1.0.2: version "1.0.2" @@ -3476,9 +3467,9 @@ batch@0.6.1: integrity 
sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== before-after-hook@^2.2.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.2.tgz#a6e8ca41028d90ee2c24222f201c90956091613e" - integrity sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ== + version "2.2.3" + resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" + integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== big.js@^5.2.2: version "5.2.2" @@ -3499,10 +3490,10 @@ bl@^4.0.3: inherits "^2.0.4" readable-stream "^3.4.0" -body-parser@1.20.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== +body-parser@1.20.1: + version "1.20.1" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" + integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== dependencies: bytes "3.1.2" content-type "~1.0.4" @@ -3512,15 +3503,15 @@ body-parser@1.20.0: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.10.3" + qs "6.11.0" raw-body "2.5.1" type-is "~1.6.18" unpipe "1.0.0" bonjour-service@^1.0.11: - version "1.0.14" - resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.0.14.tgz#c346f5bc84e87802d08f8d5a60b93f758e514ee7" - integrity sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ== + version "1.1.1" + resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.1.1.tgz#960948fa0e0153f5d26743ab15baf8e33752c135" + integrity sha512-Z/5lQRMOG9k7W+FkeGTNjh7htqn/2LMnfOvBZ8pynNZCM9MwkQkI3zeI4oz09uWdcgmgHugVvBqxGg4VQJ5PCg== dependencies: array-flatten "^2.1.2" dns-equal "^1.0.0" @@ -3580,15 +3571,15 @@ braces@^3.0.2, braces@~3.0.2: dependencies: fill-range "^7.0.1" -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.21.3, browserslist@^4.21.4: - version "4.21.4" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" - integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.10, browserslist@^4.21.4, browserslist@^4.21.9: + version "4.21.10" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.10.tgz#dbbac576628c13d3b2231332cb2ec5a46e015bb0" + integrity sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ== dependencies: - caniuse-lite "^1.0.30001400" - electron-to-chromium "^1.4.251" - node-releases "^2.0.6" - update-browserslist-db "^1.0.9" + caniuse-lite "^1.0.30001517" + electron-to-chromium "^1.4.477" + node-releases "^2.0.13" + update-browserslist-db "^1.0.11" buffer-from@^1.0.0: version "1.1.2" @@ -3653,9 +3644,9 @@ camelcase-css@2.0.1: integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== camelcase@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809" 
- integrity sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg== + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== caniuse-api@^3.0.0: version "3.0.0" @@ -3667,10 +3658,10 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001426: - version "1.0.30001441" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001441.tgz#987437b266260b640a23cd18fbddb509d7f69f3e" - integrity sha512-OyxRR4Vof59I3yGWXws6i908EtGbMzVUi3ganaZQHmydk1iwDhRnvaPG2WaR0KcqrDFKrxVZHULT396LEPhXfg== +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001517, caniuse-lite@^1.0.30001520: + version "1.0.30001525" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001525.tgz#d2e8fdec6116ffa36284ca2c33ef6d53612fe1c8" + integrity sha512-/3z+wB4icFt3r0USMwxujAqRvaD/B7rvGTsKhbhSQErVrJvkZCLhgNLJxU8MevahQVH6hCU9FsHdNUFbiwmE7Q== ccount@^1.0.0: version "1.1.0" @@ -3682,7 +3673,7 @@ ccount@^2.0.0: resolved "https://registry.yarnpkg.com/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5" integrity sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg== -chalk@^2.0.0: +chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -3691,7 +3682,7 @@ chalk@^2.0.0: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.1.0, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -3774,20 +3765,20 @@ ci-info@^2.0.0: resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== -classnames@2.x, classnames@^2.2.1, classnames@^2.2.3, classnames@^2.2.5, classnames@^2.3.1, classnames@^2.3.2: +ci-info@^3.2.0: + version "3.8.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" + integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== + +classnames@2.x, classnames@^2.2.1, classnames@^2.2.3, classnames@^2.2.5, classnames@^2.2.6, classnames@^2.3.1, classnames@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.2.tgz#351d813bf0137fcc6a76a16b88208d2560a0d924" integrity sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw== -classnames@^2.2.6: - version "2.3.1" - resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" - integrity sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA== - clean-css@^5.2.2, clean-css@^5.3.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.1.tgz#d0610b0b90d125196a2894d35366f734e5d7aa32" - integrity 
sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg== + version "5.3.2" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.2.tgz#70ecc7d4d4114921f5d298349ff86a31a9975224" + integrity sha512-JVJbM+f3d3Q704rF4bqQ5UUyTtuJ0JRKNbTKVEeujCCBoMdkEi+V+e8oktO9qGQNSvHrFTM6JZRXrUvGR1czww== dependencies: source-map "~0.6.0" @@ -3807,9 +3798,9 @@ cli-boxes@^3.0.0: integrity sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g== cli-table3@^0.6.2: - version "0.6.2" - resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.2.tgz#aaf5df9d8b5bf12634dc8b3040806a0c07120d2a" - integrity sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw== + version "0.6.3" + resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.3.tgz#61ab765aac156b52f222954ffc607a6f01dbeeb2" + integrity sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg== dependencies: string-width "^4.2.0" optionalDependencies: @@ -3830,9 +3821,9 @@ clone-deep@^4.0.1: shallow-clone "^3.0.0" clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== + version "1.0.3" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== dependencies: mimic-response "^1.0.0" @@ -3892,14 +3883,14 @@ colord@^2.9.1: integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== colorette@^2.0.10: - version "2.0.19" - resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" - integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + version "2.0.20" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" + integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== combine-promises@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/combine-promises/-/combine-promises-1.1.0.tgz#72db90743c0ca7aab7d0d8d2052fd7b0f674de71" - integrity sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg== + version "1.2.0" + resolved "https://registry.yarnpkg.com/combine-promises/-/combine-promises-1.2.0.tgz#5f2e68451862acf85761ded4d9e2af7769c2ca6a" + integrity sha512-VcQB1ziGD0NXrhKxiwyNbCDmRzs/OShMs2GqW2DlU2A/Sd0nQxE1oWDAE5O0ygSx5mgQOn9eIFh7yKPgFRVkPQ== combined-stream@^1.0.8: version "1.0.8" @@ -3963,10 +3954,10 @@ compression@^1.7.4: safe-buffer "5.1.2" vary "~1.1.2" -compute-scroll-into-view@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/compute-scroll-into-view/-/compute-scroll-into-view-2.0.2.tgz#ac5cc71ca833884866e581a82d8558a6ed7ee877" - integrity sha512-W+4Iti92hktsTtNPNeRM1vE0JdqCBk5qIabRafpr5pGrQhQ+xzCv0NGnFzTCKmW4yGLm9Aovbw8YNxloe2z9tQ== +compute-scroll-into-view@^3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/compute-scroll-into-view/-/compute-scroll-into-view-3.0.3.tgz#c418900a5c56e2b04b885b54995df164535962b1" + integrity 
sha512-nadqwNxghAGTamwIqQSG433W6OADZx2vCo3UXHNrzTRHK/htu+7+L0zhjEoaeaQVNAi3YgqWDv8+tzf0hRfR+A== concat-map@0.0.1: version "0.0.1" @@ -4013,16 +4004,14 @@ content-disposition@0.5.4: safe-buffer "5.2.1" content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== convert-source-map@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== cookie-signature@1.0.6: version "1.0.6" @@ -4035,9 +4024,9 @@ cookie@0.5.0: integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== copy-text-to-clipboard@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz#8cbf8f90e0a47f12e4a24743736265d157bce69c" - integrity sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q== + version "3.2.0" + resolved "https://registry.yarnpkg.com/copy-text-to-clipboard/-/copy-text-to-clipboard-3.2.0.tgz#0202b2d9bdae30a49a53f898626dcc3b49ad960b" + integrity sha512-RnJFp1XR/LOBDckxTib5Qjr/PMfkatD0MUCQgdpqS8MdKiNUzBjAQBEN6oUy+jW7LI93BBG3DtMB2KOOKpGs2Q== copy-to-clipboard@^3.2.0: version "3.3.3" @@ -4058,22 +4047,22 @@ copy-webpack-plugin@^11.0.0: schema-utils "^4.0.0" serialize-javascript "^6.0.0" -core-js-compat@^3.21.0, core-js-compat@^3.25.1: - version "3.27.0" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.27.0.tgz#e2c58a89df6432a5f36f3fa34097e9e83e709fb6" - integrity sha512-spN2H4E/wocMML7XtbKuqttHHM+zbF3bAdl9mT4/iyFaF33bowQGjxiWNWyvUJGH9F+hTgnhWziiLtwu3oC/Qg== +core-js-compat@^3.31.0: + version "3.32.1" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.32.1.tgz#55f9a7d297c0761a8eb1d31b593e0f5b6ffae964" + integrity sha512-GSvKDv4wE0bPnQtjklV101juQ85g6H3rm5PDP20mqlS5j0kXF3pP97YvAu5hl+uFHqMictp3b2VxOHljWMAtuA== dependencies: - browserslist "^4.21.4" + browserslist "^4.21.10" -core-js-pure@^3.20.2: - version "3.25.0" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.25.0.tgz#f8d1f176ff29abbfeb610110de891d5ae5a361d4" - integrity sha512-IeHpLwk3uoci37yoI2Laty59+YqH9x5uR65/yiA0ARAJrTrN4YU0rmauLWfvqOuk77SlNJXj2rM6oT/dBD87+A== +core-js-pure@^3.30.2: + version "3.32.1" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.32.1.tgz#5775b88f9062885f67b6d7edce59984e89d276f3" + integrity sha512-f52QZwkFVDPf7UEQZGHKx6NYxsxmVGJe5DIvbzOdRMJlmT6yv0KDjR8rmy3ngr/t5wU54c7Sp/qIJH0ppbhVpQ== core-js@^3.23.3: - version "3.25.0" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.25.0.tgz#be71d9e0dd648ffd70c44a7ec2319d039357eceb" - integrity 
sha512-CVU1xvJEfJGhyCpBrzzzU1kjCfgsGUxhEvwUV2e/cOedYWHdmluamx+knDnmhqALddMG16fZvIqvs9aijsHHaA== + version "3.32.1" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.32.1.tgz#a7d8736a3ed9dd05940c3c4ff32c591bb735be77" + integrity sha512-lqufgNn9NLnESg5mQeYsxQP5w7wrViSj0jr/kv6ECQiByzQkrn1MKvV0L3acttpDqfQrHLwr2KCMgX5b8X+lyQ== core-util-is@~1.0.0: version "1.0.3" @@ -4091,10 +4080,10 @@ cosmiconfig@^6.0.0: path-type "^4.0.0" yaml "^1.7.2" -cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" - integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== +cosmiconfig@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6" + integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== dependencies: "@types/parse-json" "^4.0.0" import-fresh "^3.2.1" @@ -4102,6 +4091,16 @@ cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: path-type "^4.0.0" yaml "^1.10.0" +cosmiconfig@^8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.2.0.tgz#f7d17c56a590856cd1e7cee98734dca272b0d8fd" + integrity sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ== + dependencies: + import-fresh "^3.2.1" + js-yaml "^4.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + create-require@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" @@ -4115,11 +4114,11 @@ cross-fetch@3.1.4: node-fetch "2.6.1" cross-fetch@^3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" - integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== + version "3.1.8" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.8.tgz#0327eba65fd68a7d119f8fb2bf9334a1a7956f82" + integrity sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg== dependencies: - node-fetch "2.6.7" + node-fetch "^2.6.12" cross-spawn@^7.0.3: version "7.0.3" @@ -4136,46 +4135,46 @@ crypto-random-string@^2.0.0: integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== css-declaration-sorter@^6.3.1: - version "6.3.1" - resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz#be5e1d71b7a992433fb1c542c7a1b835e45682ec" - integrity sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w== + version "6.4.1" + resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.4.1.tgz#28beac7c20bad7f1775be3a7129d7eae409a3a71" + integrity sha512-rtdthzxKuyq6IzqX6jEcIzQF/YqccluefyCYheovBOLhFT/drQA9zj/UbRAa9J7C0o6EG6u3E6g+vKkay7/k3g== css-loader@^6.7.1: - version "6.7.1" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" - integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== + version "6.8.1" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.8.1.tgz#0f8f52699f60f5e679eab4ec0fcd68b8e8a50a88" + integrity 
sha512-xDAXtEVGlD0gJ07iclwWVkLoZOpEvAWaSyf6W18S2pOC//K8+qUDIx8IIT3D+HjnmkJPQeesOPv5aiUaJsCM2g== dependencies: icss-utils "^5.1.0" - postcss "^8.4.7" + postcss "^8.4.21" postcss-modules-extract-imports "^3.0.0" - postcss-modules-local-by-default "^4.0.0" + postcss-modules-local-by-default "^4.0.3" postcss-modules-scope "^3.0.0" postcss-modules-values "^4.0.0" postcss-value-parser "^4.2.0" - semver "^7.3.5" + semver "^7.3.8" css-minimizer-webpack-plugin@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.0.0.tgz#e11800388c19c2b7442c39cc78ac8ae3675c9605" - integrity sha512-7ZXXRzRHvofv3Uac5Y+RkWRNo0ZMlcg8e9/OtrqUYmwDWJo+qs67GvdeFrXLsFb7czKNwjQhPkM0avlIYl+1nA== + version "4.2.2" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.2.2.tgz#79f6199eb5adf1ff7ba57f105e3752d15211eb35" + integrity sha512-s3Of/4jKfw1Hj9CxEO1E5oXhQAxlayuHO2y/ML+C6I9sQ7FdzfEV6QgMLN3vI+qFsjJGIAFLKtQK7t8BOXAIyA== dependencies: cssnano "^5.1.8" - jest-worker "^27.5.1" - postcss "^8.4.13" + jest-worker "^29.1.2" + postcss "^8.4.17" schema-utils "^4.0.0" serialize-javascript "^6.0.0" source-map "^0.6.1" css-select@^4.1.3: - version "4.1.3" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.1.3.tgz#a70440f70317f2669118ad74ff105e65849c7067" - integrity sha512-gT3wBNd9Nj49rAbmtFHj1cljIAOLYSX1nZ8CB7TBO3INYckygm5B7LISU/szY//YmdiSLbJvDLOx9VnMVpMBxA== + version "4.3.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== dependencies: boolbase "^1.0.0" - css-what "^5.0.0" - domhandler "^4.2.0" - domutils "^2.6.0" - nth-check "^2.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" css-select@^5.1.0: version "5.1.0" @@ -4196,12 +4195,7 @@ css-tree@^1.1.2, css-tree@^1.1.3: mdn-data "2.0.14" source-map "^0.6.1" -css-what@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-5.1.0.tgz#3f7b707aadf633baf62c2ceb8579b545bb40f7fe" - integrity sha512-arSMRWIIFY0hV8pIxZMEfmMI47Wj3R/aWpZDDxWYCPEiOMv6tfOrnpDtgxBYPEQD4V0Y/958+1TdC3iWTFcUPw== - -css-what@^6.1.0: +css-what@^6.0.1, css-what@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== @@ -4212,33 +4206,33 @@ cssesc@^3.0.0: integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== cssnano-preset-advanced@^5.3.8: - version "5.3.9" - resolved "https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.9.tgz#99e1cdf81a467a5e6c366cfc6d874a166c4d9a67" - integrity sha512-njnh4pp1xCsibJcEHnWZb4EEzni0ePMqPuPNyuWT4Z+YeXmsgqNuTPIljXFEXhxGsWs9183JkXgHxc1TcsahIg== + version "5.3.10" + resolved "https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.10.tgz#25558a1fbf3a871fb6429ce71e41be7f5aca6eef" + integrity sha512-fnYJyCS9jgMU+cmHO1rPSPf9axbQyD7iUhLO5Df6O4G+fKIOMps+ZbU0PdGFejFBBZ3Pftf18fn1eG7MAPUSWQ== dependencies: autoprefixer "^10.4.12" - cssnano-preset-default "^5.2.13" + cssnano-preset-default "^5.2.14" postcss-discard-unused "^5.1.0" postcss-merge-idents "^5.1.1" postcss-reduce-idents "^5.2.0" postcss-zindex "^5.1.0" 
-cssnano-preset-default@^5.2.12, cssnano-preset-default@^5.2.13: - version "5.2.13" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.13.tgz#e7353b0c57975d1bdd97ac96e68e5c1b8c68e990" - integrity sha512-PX7sQ4Pb+UtOWuz8A1d+Rbi+WimBIxJTRyBdgGp1J75VU0r/HFQeLnMYgHiCAp6AR4rqrc7Y4R+1Rjk3KJz6DQ== +cssnano-preset-default@^5.2.14: + version "5.2.14" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.14.tgz#309def4f7b7e16d71ab2438052093330d9ab45d8" + integrity sha512-t0SFesj/ZV2OTylqQVOrFgEh5uanxbO6ZAdeCrNsUQ6fVuXwYTxJPNAGvGTxHbD68ldIJNec7PyYZDBrfDQ+6A== dependencies: css-declaration-sorter "^6.3.1" cssnano-utils "^3.1.0" postcss-calc "^8.2.3" - postcss-colormin "^5.3.0" + postcss-colormin "^5.3.1" postcss-convert-values "^5.1.3" postcss-discard-comments "^5.1.2" postcss-discard-duplicates "^5.1.0" postcss-discard-empty "^5.1.1" postcss-discard-overridden "^5.1.0" postcss-merge-longhand "^5.1.7" - postcss-merge-rules "^5.1.3" + postcss-merge-rules "^5.1.4" postcss-minify-font-values "^5.1.0" postcss-minify-gradients "^5.1.1" postcss-minify-params "^5.1.4" @@ -4253,7 +4247,7 @@ cssnano-preset-default@^5.2.12, cssnano-preset-default@^5.2.13: postcss-normalize-url "^5.1.0" postcss-normalize-whitespace "^5.1.1" postcss-ordered-values "^5.1.3" - postcss-reduce-initial "^5.1.1" + postcss-reduce-initial "^5.1.2" postcss-reduce-transforms "^5.1.0" postcss-svgo "^5.1.0" postcss-unique-selectors "^5.1.1" @@ -4264,11 +4258,11 @@ cssnano-utils@^3.1.0: integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== cssnano@^5.1.12, cssnano@^5.1.8: - version "5.1.13" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.13.tgz#83d0926e72955332dc4802a7070296e6258efc0a" - integrity sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ== + version "5.1.15" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.15.tgz#ded66b5480d5127fcb44dac12ea5a983755136bf" + integrity sha512-j+BKgDcLDQA+eDifLx0EO4XSA56b7uut3BQFH+wbSaSTuGLuiyTa/wbRYthUXX8LC9mLg+WWKe8h+qJuwTAbHw== dependencies: - cssnano-preset-default "^5.2.12" + cssnano-preset-default "^5.2.14" lilconfig "^2.0.3" yaml "^1.10.2" @@ -4279,15 +4273,10 @@ csso@^4.2.0: dependencies: css-tree "^1.1.2" -csstype@^3.0.10: - version "3.1.1" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" - integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== - -csstype@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.0.tgz#4ddcac3718d787cf9df0d1b7d15033925c8f29f2" - integrity sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA== +csstype@^3.0.10, csstype@^3.0.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.2.tgz#1d4bf9d572f11c14031f0436e1c10bc1f571f50b" + integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ== dataloader@2.0.0: version "2.0.0" @@ -4295,9 +4284,9 @@ dataloader@2.0.0: integrity sha512-YzhyDAwA4TaQIhM5go+vCLmU0UikghC/t9DTQYZR2M/UvZ1MdOhPezSDZcjj9uqQJOMqjLcpWtyW2iNINdlatQ== dayjs@^1.11.1: - version "1.11.7" - resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.7.tgz#4b296922642f70999544d1144a2c25730fce63e2" - integrity sha512-+Yw9U6YO5TQohxLcIkrXBeY73WP3ejHWVvx8XCk3gxvQDCTEmS48ZrSZCKciI7Bhl/uCMyxYtE9UqRILmFphkQ== 
+ version "1.11.9" + resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.9.tgz#9ca491933fadd0a60a2c19f6c237c03517d71d1a" + integrity sha512-QvzAURSbQ0pKdIye2txOzNaHmxtUBXerpY0FJsFXUMKbIZeFm5ht1LS/jFsrncjnmtv8HsG0W2g6c0zUjZWmpA== debug@2.6.9, debug@^2.6.0: version "2.6.9" @@ -4306,20 +4295,13 @@ debug@2.6.9, debug@^2.6.0: dependencies: ms "2.0.0" -debug@^4.0.0: +debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" -debug@^4.1.0, debug@^4.1.1: - version "4.3.2" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" - integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== - dependencies: - ms "2.1.2" - decode-named-character-reference@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz#daabac9690874c394c81e4162a0304b35d824f0e" @@ -4347,9 +4329,9 @@ deep-extend@^0.6.0: integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== deepmerge@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" - integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + version "4.3.1" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== default-gateway@^6.0.3: version "6.0.3" @@ -4369,9 +4351,9 @@ define-lazy-prop@^2.0.0: integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== define-properties@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" - integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + version "1.2.0" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" + integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== dependencies: has-property-descriptors "^1.0.0" object-keys "^1.1.1" @@ -4428,9 +4410,9 @@ detab@2.0.4: repeat-string "^1.5.4" detect-libc@^2.0.0, detect-libc@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd" - integrity sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w== + version "2.0.2" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.2.tgz#8ccf2ba9315350e1241b88d0ac3b0e1fbd99605d" + integrity sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw== detect-node-es@^1.1.0: version "1.1.0" @@ -4451,12 +4433,12 @@ detect-port-alt@^1.1.6: debug "^2.6.0" detect-port@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.3.0.tgz#d9c40e9accadd4df5cac6a782aefd014d573d1f1" - integrity 
sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ== + version "1.5.1" + resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.5.1.tgz#451ca9b6eaf20451acb0799b8ab40dff7718727b" + integrity sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ== dependencies: address "^1.0.1" - debug "^2.6.0" + debug "4" diff@^4.0.1: version "4.0.2" @@ -4481,9 +4463,9 @@ dns-equal@^1.0.0: integrity sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== dns-packet@^5.2.2: - version "5.4.0" - resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b" - integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== + version "5.6.1" + resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.6.1.tgz#ae888ad425a9d1478a0674256ab866de1012cf2f" + integrity sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw== dependencies: "@leichtgewicht/ip-codec" "^2.0.1" @@ -4504,9 +4486,9 @@ docusaurus-graphql-plugin@0.5.0: url-join "^4.0.1" docusaurus-plugin-sass@^0.2.1: - version "0.2.2" - resolved "https://registry.yarnpkg.com/docusaurus-plugin-sass/-/docusaurus-plugin-sass-0.2.2.tgz#9b7f8c6fbe833677064ec05b09b98d90b50be324" - integrity sha512-ZZBpj3PrhGpYE2kAnkZB9NRwy/CDi4rGun1oec6PYR8YvGzqxYGtXvLgHi6FFbu8/N483klk8udqyYMh6Ted+A== + version "0.2.5" + resolved "https://registry.yarnpkg.com/docusaurus-plugin-sass/-/docusaurus-plugin-sass-0.2.5.tgz#6bfb8a227ac6265be685dcbc24ba1989e27b8005" + integrity sha512-Z+D0fLFUKcFpM+bqSUmqKIU+vO+YF1xoEQh5hoFreg2eMf722+siwXDD+sqtwU8E4MvVpuvsQfaHwODNlxJAEg== dependencies: sass-loader "^10.1.1" @@ -4523,9 +4505,9 @@ dom-converter@^0.2.0: utila "~0.4" dom-serializer@^1.0.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.3.2.tgz#6206437d32ceefaec7161803230c7a20bc1b4d91" - integrity sha512-5c54Bk5Dw4qAxNOI1pFEizPSjVsx5+bpJKmL2kPn8JhBUq2q09tTCa3mjijun2NfK78NMouDYNMBkOrPZiS+ig== + version "1.4.1" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== dependencies: domelementtype "^2.0.1" domhandler "^4.2.0" @@ -4545,21 +4527,21 @@ domelementtype@^2.0.1, domelementtype@^2.2.0, domelementtype@^2.3.0: resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== -domhandler@^4.0.0, domhandler@^4.2.0: - version "4.2.2" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.2.2.tgz#e825d721d19a86b8c201a35264e226c678ee755f" - integrity sha512-PzE9aBMsdZO8TK4BnuJwH0QT41wgMbRzuZrHUcpYncEjmQazq8QEaBWgLG7ZyC/DAZKEgglpIA6j4Qn/HmxS3w== +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== dependencies: domelementtype "^2.2.0" -domhandler@^5.0.1, domhandler@^5.0.2, domhandler@^5.0.3: +domhandler@^5.0.2, domhandler@^5.0.3: version "5.0.3" resolved 
"https://registry.yarnpkg.com/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31" integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== dependencies: domelementtype "^2.3.0" -domutils@^2.5.2, domutils@^2.6.0: +domutils@^2.5.2, domutils@^2.8.0: version "2.8.0" resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== @@ -4569,13 +4551,13 @@ domutils@^2.5.2, domutils@^2.6.0: domhandler "^4.2.0" domutils@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.0.1.tgz#696b3875238338cb186b6c0612bd4901c89a4f1c" - integrity sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q== + version "3.1.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.1.0.tgz#c47f551278d3dc4b0b1ab8cbb42d751a6f0d824e" + integrity sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA== dependencies: dom-serializer "^2.0.0" domelementtype "^2.3.0" - domhandler "^5.0.1" + domhandler "^5.0.3" dot-case@^3.0.4: version "3.0.4" @@ -4593,14 +4575,14 @@ dot-prop@^5.2.0: is-obj "^2.0.0" dotenv@^16.0.1: - version "16.0.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.0.1.tgz#8f8f9d94876c35dac989876a5d3a82a267fdce1d" - integrity sha512-1K6hR6wtk2FviQ4kEiSjFiH5rpzEVi8WW0x96aztHVMhEspNpc4DVOUTEHtEva5VThQ8IaBX1Pe4gSzpVVUsKQ== + version "16.3.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" + integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha512-CEj8FwwNA4cVH2uFCoHUrmojhYh1vmCdOaneKJXwkeY1i9jnlslVo9dx+hQ5Hl9GnH/Bwy/IjxAyOePyPKYnzA== + version "0.1.5" + resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e" + integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== duplexer@^0.1.2: version "0.1.2" @@ -4617,10 +4599,10 @@ ee-first@1.1.1: resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== -electron-to-chromium@^1.4.251: - version "1.4.284" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz#61046d1e4cab3a25238f6bf7413795270f125592" - integrity sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA== +electron-to-chromium@^1.4.477: + version "1.4.508" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.508.tgz#5641ff2f5ba11df4bd960fe6a2f9f70aa8b9af96" + integrity sha512-FFa8QKjQK/A5QuFr2167myhMesGrhlOBD+3cYNxO9/S4XzHEXesyTD/1/xF644gC8buFPz3ca6G1LOQD0tZrrg== emoji-regex@^8.0.0: version "8.0.0" @@ -4654,10 +4636,10 @@ end-of-stream@^1.1.0, end-of-stream@^1.4.1: dependencies: once "^1.4.0" -enhanced-resolve@^5.10.0: - version "5.10.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" - integrity 
sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== +enhanced-resolve@^5.15.0: + version "5.15.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" + integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -4667,10 +4649,10 @@ entities@^2.0.0: resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== -entities@^4.2.0, entities@^4.3.0, entities@^4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/entities/-/entities-4.4.0.tgz#97bdaba170339446495e653cfd2db78962900174" - integrity sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA== +entities@^4.2.0, entities@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" + integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== error-ex@^1.3.1: version "1.3.2" @@ -4679,10 +4661,10 @@ error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== +es-module-lexer@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.3.0.tgz#6be9c9e0b4543a60cd166ff6f8b4e9dae0b0c16f" + integrity sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA== escalade@^3.1.1: version "3.1.1" @@ -4740,24 +4722,19 @@ estraverse@^4.1.1: integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== estraverse@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" - integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== esutils@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -eta@^1.12.3: - version "1.12.3" - resolved "https://registry.yarnpkg.com/eta/-/eta-1.12.3.tgz#2982d08adfbef39f9fa50e2fbd42d7337e7338b1" - integrity sha512-qHixwbDLtekO/d51Yr4glcaUJCIjGVJyTzuqV4GPlgZo1YpgOKG+avQynErZIYrfM6JIJdtiG2Kox8tbb+DoGg== - eta@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/eta/-/eta-2.0.0.tgz#376865fadebc899e5b6dfce82fae64cbbe47e594" - integrity sha512-NqE7S2VmVwgMS8yBxsH4VgNQjNjLq1gfGU0u9I6Cjh468nPRMoDfGdK9n1p/3Dvsw3ebklDkZsFAnKJ9sefjBA== + version "2.2.0" + resolved "https://registry.yarnpkg.com/eta/-/eta-2.2.0.tgz#eb8b5f8c4e8b6306561a455e62cd7492fe3a9b8a" + integrity 
sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g== etag@~1.8.1: version "1.8.1" @@ -4813,13 +4790,13 @@ expand-template@^2.0.3: integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== express@^4.17.3: - version "4.18.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + version "4.18.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" + integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.0" + body-parser "1.20.1" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.5.0" @@ -4838,7 +4815,7 @@ express@^4.17.3: parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.7" - qs "6.10.3" + qs "6.11.0" range-parser "~1.2.1" safe-buffer "5.2.1" send "0.18.0" @@ -4871,10 +4848,10 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^3.1.1, fast-glob@^3.2.11, fast-glob@^3.2.9: - version "3.2.11" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" - integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== +fast-glob@^3.1.1, fast-glob@^3.2.11, fast-glob@^3.2.9, fast-glob@^3.3.0: + version "3.3.1" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" + integrity sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -4895,9 +4872,9 @@ fast-url-parser@1.1.3: punycode "^1.3.2" fastq@^1.6.0: - version "1.13.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" - integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + version "1.15.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== dependencies: reusify "^1.0.4" @@ -4921,9 +4898,9 @@ fbjs-css-vars@^1.0.0: integrity sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ== fbjs@^3.0.0, fbjs@^3.0.1: - version "3.0.4" - resolved "https://registry.yarnpkg.com/fbjs/-/fbjs-3.0.4.tgz#e1871c6bd3083bac71ff2da868ad5067d37716c6" - integrity sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ== + version "3.0.5" + resolved "https://registry.yarnpkg.com/fbjs/-/fbjs-3.0.5.tgz#aa0edb7d5caa6340011790bd9249dbef8a81128d" + integrity sha512-ztsSx77JBtkuMrEypfhgc3cI0+0h+svqeie7xHbh1k/IKdcydnvadp/mUaGgjAOXQmQSxsqgaRhS3q9fy+1kxg== dependencies: cross-fetch "^3.1.5" fbjs-css-vars "^1.0.0" @@ -4931,7 +4908,7 @@ fbjs@^3.0.0, fbjs@^3.0.1: object-assign "^4.1.0" promise "^7.1.1" setimmediate "^1.0.5" - ua-parser-js "^0.7.30" + ua-parser-js "^1.0.35" 
feed@^4.2.2: version "4.2.2" @@ -5006,22 +4983,22 @@ find-up@^5.0.0: path-exists "^4.0.0" flux@^4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/flux/-/flux-4.0.3.tgz#573b504a24982c4768fdfb59d8d2ea5637d72ee7" - integrity sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw== + version "4.0.4" + resolved "https://registry.yarnpkg.com/flux/-/flux-4.0.4.tgz#9661182ea81d161ee1a6a6af10d20485ef2ac572" + integrity sha512-NCj3XlayA2UsapRpM7va6wU1+9rE5FIL7qoMcmxWHRzbp0yujihMBm9BBHZ1MDIk5h5o2Bl6eGiCe8rYELAmYw== dependencies: fbemitter "^3.0.0" fbjs "^3.0.1" follow-redirects@^1.0.0, follow-redirects@^1.14.7: - version "1.15.1" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" - integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== fork-ts-checker-webpack-plugin@^6.5.0: - version "6.5.2" - resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" - integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== + version "6.5.3" + resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz#eda2eff6e22476a2688d10661688c47f611b37f3" + integrity sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ== dependencies: "@babel/code-frame" "^7.8.3" "@types/json-schema" "^7.0.5" @@ -5052,9 +5029,9 @@ forwarded@0.2.0: integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== fraction.js@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" - integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + version "4.3.4" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.4.tgz#b2bac8249a610c3396106da97c5a71da75b94b1c" + integrity sha512-pwiTgt0Q7t+GHZA4yaLjObx4vXmmdcS0iSJ19o8d/goUGgItX9UZWKWNnLHehxviD8wU2IWRsnR8cD5+yOJP2Q== fresh@0.5.2: version "0.5.2" @@ -5085,10 +5062,10 @@ fs-extra@^9.0.0, fs-extra@^9.1.0: jsonfile "^6.0.1" universalify "^2.0.0" -fs-monkey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" - integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== +fs-monkey@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.4.tgz#ee8c1b53d3fe8bb7e5d2c5c5dfc0168afdd2f747" + integrity sha512-INM/fWAxMICjttnD0DX1rBvinKskj5G1w+oy/pnm9u/tSlnBrzFonJMcalKJ30P8RRsPzKcCG7Q8l0jx5Fh9YQ== fs.realpath@^1.0.0: version "1.0.0" @@ -5096,9 +5073,9 @@ fs.realpath@^1.0.0: integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== fsevents@~2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity 
sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== function-bind@^1.1.1: version "1.1.1" @@ -5111,13 +5088,14 @@ gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== get-intrinsic@^1.0.2, get-intrinsic@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" - integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + version "1.2.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.1.tgz#d295644fed4505fc9cde952c37ee12b477a83d82" + integrity sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw== dependencies: function-bind "^1.1.1" has "^1.0.3" - has-symbols "^1.0.1" + has-proto "^1.0.1" + has-symbols "^1.0.3" get-nonce@^1.0.0: version "1.0.1" @@ -5190,9 +5168,9 @@ glob@^7.0.0, glob@^7.1.3, glob@^7.1.6: path-is-absolute "^1.0.0" global-dirs@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.0.tgz#70a76fe84ea315ab37b1f5576cbde7d48ef72686" - integrity sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA== + version "3.0.1" + resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.1.tgz#0c488971f066baceda21447aecb1a8b911d22485" + integrity sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA== dependencies: ini "2.0.0" @@ -5242,13 +5220,13 @@ globby@^11.0.1, globby@^11.0.4, globby@^11.1.0: slash "^3.0.0" globby@^13.1.1: - version "13.1.2" - resolved "https://registry.yarnpkg.com/globby/-/globby-13.1.2.tgz#29047105582427ab6eca4f905200667b056da515" - integrity sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ== + version "13.2.2" + resolved "https://registry.yarnpkg.com/globby/-/globby-13.2.2.tgz#63b90b1bf68619c2135475cbd4e71e66aa090592" + integrity sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w== dependencies: dir-glob "^3.0.1" - fast-glob "^3.2.11" - ignore "^5.2.0" + fast-glob "^3.3.0" + ignore "^5.2.4" merge2 "^1.4.1" slash "^4.0.0" @@ -5270,9 +5248,9 @@ got@^9.6.0: url-parse-lax "^3.0.0" graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: - version "4.2.10" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== graphql-ws@^4.4.1: version "4.9.0" @@ -5280,9 +5258,9 @@ graphql-ws@^4.4.1: integrity sha512-sHkK9+lUm20/BGawNEWNtVAeJzhZeBg21VmvmLoT5NdGVeZWv5PdIhkcayQIAgjSyyQ17WMKmbDijIPG2On+Ag== graphql@^15.5.0: - version "15.6.1" - resolved 
"https://registry.yarnpkg.com/graphql/-/graphql-15.6.1.tgz#9125bdf057553525da251e19e96dab3d3855ddfc" - integrity sha512-3i5lu0z6dRvJ48QP9kFxBkJ7h4Kso7PS8eahyTFz5Jm6CvQfLtNIE8LX9N6JLnXTuwR+sIYnXzaWp6anOg0QQw== + version "15.8.0" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.8.0.tgz#33410e96b012fa3bdb1091cc99a94769db212b38" + integrity sha512-5gghUc24tP9HRznNpV2+FIoq3xKkj5dTQqf4v0CpdPbFVwFkWoxOM+o+2OC9ZSvjEMTjfmG9QT+gcvggTwW1zw== gray-matter@^4.0.3: version "4.0.3" @@ -5323,7 +5301,12 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" -has-symbols@^1.0.1, has-symbols@^1.0.3: +has-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== + +has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== @@ -5448,9 +5431,9 @@ hpack.js@^2.1.6: wbuf "^1.1.0" html-entities@^2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" - integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + version "2.4.0" + resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.4.0.tgz#edd0cee70402584c8c76cc2c0556db09d1f45061" + integrity sha512-igBTJcNNNhvZFRtm8uA6xMY6xYleeDwn3PeBCkDz7tHttv4F2hsDI2aPgNERWzvRcNYHNT3ymRaQzllmXj4YsQ== html-minifier-terser@^6.0.2, html-minifier-terser@^6.1.0: version "6.1.0" @@ -5466,9 +5449,9 @@ html-minifier-terser@^6.0.2, html-minifier-terser@^6.1.0: terser "^5.10.0" html-tags@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.2.0.tgz#dbb3518d20b726524e4dd43de397eb0a95726961" - integrity sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg== + version "3.3.1" + resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.3.1.tgz#a04026a18c882e4bba8a01a3d39cfe465d40b5ce" + integrity sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ== html-void-elements@^1.0.0: version "1.0.5" @@ -5476,9 +5459,9 @@ html-void-elements@^1.0.0: integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== html-webpack-plugin@^5.5.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" - integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + version "5.5.3" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.3.tgz#72270f4a78e222b5825b296e5e3e1328ad525a3e" + integrity sha512-6YrDKTuqaP/TquFH7h4srYWsZx+x6k6+FbsTm0ziCwGHDP78Unr1r9F/H4+sGmMbX08GQcJ+K64x55b+7VM/jg== dependencies: "@types/html-minifier-terser" "^6.0.0" html-minifier-terser "^6.0.2" @@ -5497,14 +5480,14 @@ htmlparser2@^6.1.0: entities "^2.0.0" htmlparser2@^8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-8.0.1.tgz#abaa985474fcefe269bc761a779b544d7196d010" - integrity sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA== + version 
"8.0.2" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-8.0.2.tgz#f002151705b383e62433b5cf466f5b716edaec21" + integrity sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA== dependencies: domelementtype "^2.3.0" - domhandler "^5.0.2" + domhandler "^5.0.3" domutils "^3.0.1" - entities "^4.3.0" + entities "^4.4.0" http-cache-semantics@^4.0.0: version "4.1.1" @@ -5538,9 +5521,9 @@ http-errors@~1.6.2: statuses ">= 1.4.0 < 2" http-parser-js@>=0.5.1: - version "0.5.3" - resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.3.tgz#01d2709c79d41698bb01d4decc5e9da4e4a033d9" - integrity sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg== + version "0.5.8" + resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" + integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== http-proxy-middleware@^2.0.3: version "2.0.6" @@ -5584,10 +5567,10 @@ ieee754@^1.1.13: resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^5.1.4, ignore@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" - integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== +ignore@^5.1.4, ignore@^5.2.0, ignore@^5.2.4: + version "5.2.4" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== image-size@^1.0.1: version "1.0.2" @@ -5597,9 +5580,14 @@ image-size@^1.0.1: queue "6.0.2" immer@^9.0.7: - version "9.0.15" - resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.15.tgz#0b9169e5b1d22137aba7d43f8a81a495dd1b62dc" - integrity sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ== + version "9.0.21" + resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.21.tgz#1e025ea31a40f24fb064f1fef23e931496330176" + integrity sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== + +immutable@^4.0.0: + version "4.3.4" + resolved "https://registry.yarnpkg.com/immutable/-/immutable-4.3.4.tgz#2e07b33837b4bb7662f288c244d1ced1ef65a78f" + integrity sha512-fsXeu4J4i6WNWSikpI88v/PcVflZz+6kMhUfIwc5SY+poQRPnaf5V7qds6SUyUN3cVxEzuCab7QIoLOQ+DQ1wA== import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0: version "3.3.0" @@ -5631,10 +5619,10 @@ indent-string@^4.0.0: resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -infima@0.2.0-alpha.42: - version "0.2.0-alpha.42" - resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.42.tgz#f6e86a655ad40877c6b4d11b2ede681eb5470aa5" - integrity sha512-ift8OXNbQQwtbIt6z16KnSWP7uJ/SysSMFI4F87MNRTicypfl4Pv3E2OGVv6N3nSZFJvA8imYulCBS64iyHYww== +infima@0.2.0-alpha.43: + version "0.2.0-alpha.43" + resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.43.tgz#f7aa1d7b30b6c08afef441c726bac6150228cbe0" + integrity 
sha512-2uw57LvUqW0rK/SWYnd/2rRfxNA5DDNOh33jxF7fy46VWoNhGxiUQyVZHbBMjQ33mQem0cjdDVwgWVAmlRfgyQ== inflight@^1.0.4: version "1.0.6" @@ -5687,9 +5675,9 @@ ipaddr.js@1.9.1: integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== ipaddr.js@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" - integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + version "2.1.0" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" + integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== is-alphabetical@1.0.4, is-alphabetical@^1.0.0: version "1.0.4" @@ -5733,10 +5721,10 @@ is-ci@^2.0.0: dependencies: ci-info "^2.0.0" -is-core-module@^2.2.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.0.tgz#0321336c3d0925e497fd97f5d95cb114a5ccd548" - integrity sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw== +is-core-module@^2.13.0: + version "2.13.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.0.tgz#bb52aa6e2cbd49a30c2ba68c42bf3435ba6072db" + integrity sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ== dependencies: has "^1.0.3" @@ -5926,7 +5914,19 @@ iterall@^1.2.1: resolved "https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea" integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg== -jest-worker@^27.4.5, jest-worker@^27.5.1: +jest-util@^29.6.3: + version "29.6.3" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.6.3.tgz#e15c3eac8716440d1ed076f09bc63ace1aebca63" + integrity sha512-QUjna/xSy4B32fzcKTSz1w7YYzgiHrjjJjevdRf61HYk998R5vVMMNmrHESYZVDS5DSWs+1srPLPKxXPkeSDOA== + dependencies: + "@jest/types" "^29.6.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-worker@^27.4.5: version "27.5.1" resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== @@ -5935,15 +5935,30 @@ jest-worker@^27.4.5, jest-worker@^27.5.1: merge-stream "^2.0.0" supports-color "^8.0.0" +jest-worker@^29.1.2: + version "29.6.4" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.6.4.tgz#f34279f4afc33c872b470d4af21b281ac616abd3" + integrity sha512-6dpvFV4WjcWbDVGgHTWo/aupl8/LbBx2NSKfiwqf79xC/yeJjKHT1+StcKy/2KTmW16hE68ccKVOtXf+WZGz7Q== + dependencies: + "@types/node" "*" + jest-util "^29.6.3" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jiti@^1.18.2: + version "1.19.3" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.19.3.tgz#ef554f76465b3c2b222dc077834a71f0d4a37569" + integrity sha512-5eEbBDQT/jF1xg6l36P+mWGGoH9Spuy0PCdSr2dtWRDGC6ph/w9ZCL4lmESW8f8F7MwT3XKescfP0wnZWAKL9w== + joi@^17.4.0, joi@^17.6.0: - version "17.6.0" - resolved "https://registry.yarnpkg.com/joi/-/joi-17.6.0.tgz#0bb54f2f006c09a96e75ce687957bd04290054b2" - integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw== + version "17.10.1" + resolved 
"https://registry.yarnpkg.com/joi/-/joi-17.10.1.tgz#f908ee1617137cca5d83b91587cde80e472b5753" + integrity sha512-vIiDxQKmRidUVp8KngT8MZSOcmRVm2zV7jbMjNYWuHcJWI0bUck3nRTGQjhpPlQenIQIBC5Vp9AhcnHbWQqafw== dependencies: "@hapi/hoek" "^9.0.0" "@hapi/topo" "^5.0.0" "@sideway/address" "^4.1.3" - "@sideway/formula" "^3.0.0" + "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: @@ -6003,7 +6018,7 @@ json2mq@^0.2.0: dependencies: string-convert "^0.2.0" -json5@^2.1.2, json5@^2.2.1: +json5@^2.1.2, json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== @@ -6039,10 +6054,10 @@ kleur@^4.0.3: resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== -klona@^2.0.4, klona@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" - integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== +klona@^2.0.4: + version "2.0.6" + resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.6.tgz#85bffbf819c03b2f53270412420a4555ef882e22" + integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== latest-version@^5.1.0: version "5.1.0" @@ -6051,25 +6066,33 @@ latest-version@^5.1.0: dependencies: package-json "^6.3.0" +launch-editor@^2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.0.tgz#4c0c1a6ac126c572bd9ff9a30da1d2cae66defd7" + integrity sha512-JpDCcQnyAAzZZaZ7vEiSqL690w7dAEyLao+KC96zBplnYbJS7TYNjvM3M7y3dGz+v7aIsJk3hllWuc0kWAjyRQ== + dependencies: + picocolors "^1.0.0" + shell-quote "^1.7.3" + leven@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== lilconfig@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.3.tgz#68f3005e921dafbd2a2afb48379986aa6d2579fd" - integrity sha512-EHKqr/+ZvdKCifpNrJCKxBTgk5XupZA3y/aCPY9mxfgBzmgh93Mt/WqjjQ38oMxXuvDokaKiM3lAgvSH2sjtHg== + version "2.1.0" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" + integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== lines-and-columns@^1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" - integrity sha512-8ZmlJFVK9iCmtLz19HpSsR8HaAMWBT284VMNednLwlIMDP2hJDCIhUp0IZ2xUcZ+Ob6BM0VvCSJwzASDM45NLQ== + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== loader-runner@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.2.0.tgz#d7022380d66d14c5fb1d496b89864ebcfd478384" - integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== 
+ version "4.3.0" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== loader-utils@^2.0.0: version "2.0.4" @@ -6107,6 +6130,11 @@ locate-path@^6.0.0: dependencies: p-locate "^5.0.0" +lodash.camelcase@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== + lodash.curry@^4.0.1: version "4.1.1" resolved "https://registry.yarnpkg.com/lodash.curry/-/lodash.curry-4.1.1.tgz#248e36072ede906501d75966200a86dab8b23170" @@ -6117,22 +6145,47 @@ lodash.debounce@^4.0.8: resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== +lodash.escape@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.escape/-/lodash.escape-4.0.1.tgz#c9044690c21e04294beaa517712fded1fa88de98" + integrity sha512-nXEOnb/jK9g0DYMr1/Xvq6l5xMD7GDG55+GSYIYmS0G4tBk/hURD4JR9WCavs04t33WmJx9kCyp9vJ+mr4BOUw== + +lodash.flatten@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" + integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g== + lodash.flow@^3.3.0: version "3.5.0" resolved "https://registry.yarnpkg.com/lodash.flow/-/lodash.flow-3.5.0.tgz#87bf40292b8cf83e4e8ce1a3ae4209e20071675a" integrity sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw== +lodash.invokemap@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/lodash.invokemap/-/lodash.invokemap-4.6.0.tgz#1748cda5d8b0ef8369c4eb3ec54c21feba1f2d62" + integrity sha512-CfkycNtMqgUlfjfdh2BhKO/ZXrP8ePOX5lEU/g0R3ItJcnuxWDwokMGKx1hWcfOikmyOVx6X9IwWnDGlgKl61w== + lodash.memoize@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== +lodash.pullall@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.pullall/-/lodash.pullall-4.2.0.tgz#9d98b8518b7c965b0fae4099bd9fb7df8bbf38ba" + integrity sha512-VhqxBKH0ZxPpLhiu68YD1KnHmbhQJQctcipvmFnqIBDYzcIHzf3Zpu0tpeOKtR4x76p9yohc506eGdOjTmyIBg== + lodash.uniq@4.5.0, lodash.uniq@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== -lodash@4.17.21, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: +lodash.uniqby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz#d99c07a669e9e6d24e1362dfe266c67616af1302" + integrity sha512-e/zcLx6CSbmaEgFHCA7BnoQKyCtKMxnuWrJygbwPs/AIn+IMKl66L8/s+wBUn5LRw2pZx3bUHibiV1b6aTWIww== + +lodash@4.17.21, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" 
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -6251,9 +6304,9 @@ mdast-util-find-and-replace@^2.0.0: unist-util-visit-parents "^5.0.0" mdast-util-from-markdown@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.0.tgz#0214124154f26154a2b3f9d401155509be45e894" - integrity sha512-HN3W1gRIuN/ZW295c7zi7g9lVBllMgZE40RxCX37wrTPWXCWtpvOZdfnuK+1WNpvZje6XuJeI3Wnb4TJEUem+g== + version "1.3.1" + resolved "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz#9421a5a247f10d31d2faed2a30df5ec89ceafcf0" + integrity sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww== dependencies: "@types/mdast" "^3.0.0" "@types/unist" "^2.0.0" @@ -6404,11 +6457,11 @@ media-typer@0.3.0: integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== memfs@^3.1.2, memfs@^3.4.3: - version "3.4.7" - resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a" - integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw== + version "3.6.0" + resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6" + integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ== dependencies: - fs-monkey "^1.0.3" + fs-monkey "^1.0.4" merge-descriptors@1.0.1: version "1.0.1" @@ -6436,9 +6489,9 @@ methods@~1.1.2: integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== micromark-core-commonmark@^1.0.0, micromark-core-commonmark@^1.0.1: - version "1.0.6" - resolved "https://registry.yarnpkg.com/micromark-core-commonmark/-/micromark-core-commonmark-1.0.6.tgz#edff4c72e5993d93724a3c206970f5a15b0585ad" - integrity sha512-K+PkJTxqjFfSNkfAhp4GB+cZPfQd6dxtTXnf+RjZOV7T4EEXnvgzOcnp+eSTmpGk9d1S9sL6/lqrgSNn/s0HZA== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz#1386628df59946b2d39fb2edfd10f3e8e0a75bb8" + integrity sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw== dependencies: decode-named-character-reference "^1.0.0" micromark-factory-destination "^1.0.0" @@ -6458,20 +6511,19 @@ micromark-core-commonmark@^1.0.0, micromark-core-commonmark@^1.0.1: uvu "^0.5.0" micromark-extension-gfm-autolink-literal@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.3.tgz#dc589f9c37eaff31a175bab49f12290edcf96058" - integrity sha512-i3dmvU0htawfWED8aHMMAzAVp/F0Z+0bPh3YrbTPPL1v4YAlCZpy5rBO5p0LPYiZo0zFVkoYh7vDU7yQSiCMjg== + version "1.0.5" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.5.tgz#5853f0e579bbd8ef9e39a7c0f0f27c5a063a66e7" + integrity sha512-z3wJSLrDf8kRDOh2qBtoTRD53vJ+CWIyo7uyZuxf/JAbNJjiHsOpG1y5wxk8drtv3ETAHutCu6N3thkOOgueWg== dependencies: micromark-util-character "^1.0.0" micromark-util-sanitize-uri "^1.0.0" micromark-util-symbol "^1.0.0" micromark-util-types "^1.0.0" - uvu "^0.5.0" micromark-extension-gfm-footnote@^1.0.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.1.0.tgz#73e3db823db9defef25f68074cb4cf4bb9cf6a8c" - integrity sha512-RWYce7j8+c0n7Djzv5NzGEGitNNYO3uj+h/XYMdS/JinH1Go+/Qkomg/rfxExFzYTiydaV6GLeffGO5qcJbMPA== + version "1.1.2" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.1.2.tgz#05e13034d68f95ca53c99679040bc88a6f92fe2e" + integrity sha512-Yxn7z7SxgyGWRNa4wzf8AhYYWNrwl5q1Z8ii+CSTTIqVkmGZF1CElX2JI8g5yGoM3GAman9/PVCUFUSJ0kB/8Q== dependencies: micromark-core-commonmark "^1.0.0" micromark-factory-space "^1.0.0" @@ -6483,9 +6535,9 @@ micromark-extension-gfm-footnote@^1.0.0: uvu "^0.5.0" micromark-extension-gfm-strikethrough@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.5.tgz#4db40b87d674a6fe1d00d59ac91118e4f5960f12" - integrity sha512-X0oI5eYYQVARhiNfbETy7BfLSmSilzN1eOuoRnrf9oUNsPRrWOAe9UqSizgw1vNxQBfOwL+n2610S3bYjVNi7w== + version "1.0.7" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.7.tgz#c8212c9a616fa3bf47cb5c711da77f4fdc2f80af" + integrity sha512-sX0FawVE1o3abGk3vRjOH50L5TTLr3b5XMqnP9YDRb34M0v5OoZhG+OHFz1OffZ9dlwgpTBKaT4XW/AsUVnSDw== dependencies: micromark-util-chunked "^1.0.0" micromark-util-classify-character "^1.0.0" @@ -6495,9 +6547,9 @@ micromark-extension-gfm-strikethrough@^1.0.0: uvu "^0.5.0" micromark-extension-gfm-table@^1.0.0: - version "1.0.5" - resolved "https://registry.yarnpkg.com/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.5.tgz#7b708b728f8dc4d95d486b9e7a2262f9cddbcbb4" - integrity sha512-xAZ8J1X9W9K3JTJTUL7G6wSKhp2ZYHrFk5qJgY/4B33scJzE2kpfRL6oiw/veJTbt7jiM/1rngLlOKPWr1G+vg== + version "1.0.7" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.7.tgz#dcb46074b0c6254c3fc9cc1f6f5002c162968008" + integrity sha512-3ZORTHtcSnMQEKtAOsBQ9/oHp9096pI/UvdPtN7ehKvrmZZ2+bbWhi0ln+I9drmwXMt5boocn6OlwQzNXeVeqw== dependencies: micromark-factory-space "^1.0.0" micromark-util-character "^1.0.0" @@ -6513,9 +6565,9 @@ micromark-extension-gfm-tagfilter@^1.0.0: micromark-util-types "^1.0.0" micromark-extension-gfm-task-list-item@^1.0.0: - version "1.0.4" - resolved "https://registry.yarnpkg.com/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.4.tgz#4b66d87847de40cef2b5ceddb9f9629a6dfe7472" - integrity sha512-9XlIUUVnYXHsFF2HZ9jby4h3npfX10S1coXTnV035QGPgrtNYQq3J6IfIvcCIUAJrrqBVi5BqA/LmaOMJqPwMQ== + version "1.0.5" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.5.tgz#b52ce498dc4c69b6a9975abafc18f275b9dde9f4" + integrity sha512-RMFXl2uQ0pNQy6Lun2YBYT9g9INXtWJULgbt01D/x8/6yJ2qpKyzdZD3pi6UIkzF++Da49xAelVKUeUMqd5eIQ== dependencies: micromark-factory-space "^1.0.0" micromark-util-character "^1.0.0" @@ -6524,9 +6576,9 @@ micromark-extension-gfm-task-list-item@^1.0.0: uvu "^0.5.0" micromark-extension-gfm@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/micromark-extension-gfm/-/micromark-extension-gfm-2.0.1.tgz#40f3209216127a96297c54c67f5edc7ef2d1a2a2" - integrity sha512-p2sGjajLa0iYiGQdT0oelahRYtMWvLjy8J9LOCxzIQsllMCGLbsLW+Nc+N4vi02jcRJvedVJ68cjelKIO6bpDA== + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/micromark-extension-gfm/-/micromark-extension-gfm-2.0.3.tgz#e517e8579949a5024a493e49204e884aa74f5acf" + integrity sha512-vb9OoHqrhCmbRidQv/2+Bc6pkP0FrtlhurxZofvOEy5o8RtuuvTq+RQ1Vw5ZDNrVraQZu3HixESqbG+0iKk/MQ== dependencies: micromark-extension-gfm-autolink-literal "^1.0.0" micromark-extension-gfm-footnote "^1.0.0" @@ -6538,18 +6590,18 @@ micromark-extension-gfm@^2.0.0: micromark-util-types "^1.0.0" micromark-factory-destination@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz#fef1cb59ad4997c496f887b6977aa3034a5a277e" - integrity sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz#eb815957d83e6d44479b3df640f010edad667b9f" + integrity sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg== dependencies: micromark-util-character "^1.0.0" micromark-util-symbol "^1.0.0" micromark-util-types "^1.0.0" micromark-factory-label@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/micromark-factory-label/-/micromark-factory-label-1.0.2.tgz#6be2551fa8d13542fcbbac478258fb7a20047137" - integrity sha512-CTIwxlOnU7dEshXDQ+dsr2n+yxpP0+fn271pu0bwDIS8uqfFcumXpj5mLn3hSC8iw2MUr6Gx8EcKng1dD7i6hg== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz#cc95d5478269085cfa2a7282b3de26eb2e2dec68" + integrity sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w== dependencies: micromark-util-character "^1.0.0" micromark-util-symbol "^1.0.0" @@ -6557,28 +6609,27 @@ micromark-factory-label@^1.0.0: uvu "^0.5.0" micromark-factory-space@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-space/-/micromark-factory-space-1.0.0.tgz#cebff49968f2b9616c0fcb239e96685cb9497633" - integrity sha512-qUmqs4kj9a5yBnk3JMLyjtWYN6Mzfcx8uJfi5XAveBniDevmZasdGBba5b4QsvRcAkmvGo5ACmSUmyGiKTLZew== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz#c8f40b0640a0150751d3345ed885a080b0d15faf" + integrity sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ== dependencies: micromark-util-character "^1.0.0" micromark-util-types "^1.0.0" micromark-factory-title@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/micromark-factory-title/-/micromark-factory-title-1.0.2.tgz#7e09287c3748ff1693930f176e1c4a328382494f" - integrity sha512-zily+Nr4yFqgMGRKLpTVsNl5L4PMu485fGFDOQJQBl2NFpjGte1e86zC0da93wf97jrc4+2G2GQudFMHn3IX+A== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz#dd0fe951d7a0ac71bdc5ee13e5d1465ad7f50ea1" + integrity sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ== dependencies: micromark-factory-space "^1.0.0" micromark-util-character "^1.0.0" micromark-util-symbol "^1.0.0" micromark-util-types "^1.0.0" - uvu "^0.5.0" micromark-factory-whitespace@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-whitespace/-/micromark-factory-whitespace-1.0.0.tgz#e991e043ad376c1ba52f4e49858ce0794678621c" - integrity sha512-Qx7uEyahU1lt1RnsECBiuEbfr9INjQTGa6Err+gF3g0Tx4YEviPbqqGKNv/NrBaE7dVHdn1bVZKM/n5I/Bak7A== + 
version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz#798fb7489f4c8abafa7ca77eed6b5745853c9705" + integrity sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ== dependencies: micromark-factory-space "^1.0.0" micromark-util-character "^1.0.0" @@ -6586,48 +6637,48 @@ micromark-factory-whitespace@^1.0.0: micromark-util-types "^1.0.0" micromark-util-character@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-1.1.0.tgz#d97c54d5742a0d9611a68ca0cd4124331f264d86" - integrity sha512-agJ5B3unGNJ9rJvADMJ5ZiYjBRyDpzKAOk01Kpi1TKhlT1APx3XZk6eN7RtSz1erbWHC2L8T3xLZ81wdtGRZzg== + version "1.2.0" + resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-1.2.0.tgz#4fedaa3646db249bc58caeb000eb3549a8ca5dcc" + integrity sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg== dependencies: micromark-util-symbol "^1.0.0" micromark-util-types "^1.0.0" micromark-util-chunked@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-chunked/-/micromark-util-chunked-1.0.0.tgz#5b40d83f3d53b84c4c6bce30ed4257e9a4c79d06" - integrity sha512-5e8xTis5tEZKgesfbQMKRCyzvffRRUX+lK/y+DvsMFdabAicPkkZV6gO+FEWi9RfuKKoxxPwNL+dFF0SMImc1g== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz#37a24d33333c8c69a74ba12a14651fd9ea8a368b" + integrity sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ== dependencies: micromark-util-symbol "^1.0.0" micromark-util-classify-character@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-classify-character/-/micromark-util-classify-character-1.0.0.tgz#cbd7b447cb79ee6997dd274a46fc4eb806460a20" - integrity sha512-F8oW2KKrQRb3vS5ud5HIqBVkCqQi224Nm55o5wYLzY/9PwHGXC01tr3d7+TqHHz6zrKQ72Okwtvm/xQm6OVNZA== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz#6a7f8c8838e8a120c8e3c4f2ae97a2bff9190e9d" + integrity sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw== dependencies: micromark-util-character "^1.0.0" micromark-util-symbol "^1.0.0" micromark-util-types "^1.0.0" micromark-util-combine-extensions@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.0.0.tgz#91418e1e74fb893e3628b8d496085639124ff3d5" - integrity sha512-J8H058vFBdo/6+AsjHp2NF7AJ02SZtWaVUjsayNFeAiydTxUwViQPxN0Hf8dp4FmCQi0UUFovFsEyRSUmFH3MA== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz#192e2b3d6567660a85f735e54d8ea6e3952dbe84" + integrity sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA== dependencies: micromark-util-chunked "^1.0.0" micromark-util-types "^1.0.0" micromark-util-decode-numeric-character-reference@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.0.0.tgz#dcc85f13b5bd93ff8d2868c3dba28039d490b946" - integrity sha512-OzO9AI5VUtrTD7KSdagf4MWgHMtET17Ua1fIpXTpuhclCqD8egFWo85GxSGvxgkGS74bEahvtM0WP0HjvV0e4w== + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz#b1e6e17009b1f20bc652a521309c5f22c85eb1c6" + integrity sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw== dependencies: micromark-util-symbol "^1.0.0" micromark-util-decode-string@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/micromark-util-decode-string/-/micromark-util-decode-string-1.0.2.tgz#942252ab7a76dec2dbf089cc32505ee2bc3acf02" - integrity sha512-DLT5Ho02qr6QWVNYbRZ3RYOSSWWFuH3tJexd3dgN1odEuPNxCngTCXJum7+ViRAd9BbdxCvMToPOD/IvVhzG6Q== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz#dc12b078cba7a3ff690d0203f95b5d5537f2809c" + integrity sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ== dependencies: decode-named-character-reference "^1.0.0" micromark-util-character "^1.0.0" @@ -6635,42 +6686,42 @@ micromark-util-decode-string@^1.0.0: micromark-util-symbol "^1.0.0" micromark-util-encode@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-1.0.1.tgz#2c1c22d3800870ad770ece5686ebca5920353383" - integrity sha512-U2s5YdnAYexjKDel31SVMPbfi+eF8y1U4pfiRW/Y8EFVCy/vgxk/2wWTxzcqE71LHtCuCzlBDRU2a5CQ5j+mQA== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz#92e4f565fd4ccb19e0dcae1afab9a173bbeb19a5" + integrity sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw== micromark-util-html-tag-name@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.1.0.tgz#eb227118befd51f48858e879b7a419fc0df20497" - integrity sha512-BKlClMmYROy9UiV03SwNmckkjn8QHVaWkqoAqzivabvdGcwNGMMMH/5szAnywmsTBUzDsU57/mFi0sp4BQO6dA== + version "1.2.0" + resolved "https://registry.yarnpkg.com/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz#48fd7a25826f29d2f71479d3b4e83e94829b3588" + integrity sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q== micromark-util-normalize-identifier@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.0.0.tgz#4a3539cb8db954bbec5203952bfe8cedadae7828" - integrity sha512-yg+zrL14bBTFrQ7n35CmByWUTFsgst5JhA4gJYoty4Dqzj4Z4Fr/DHekSS5aLfH9bdlfnSvKAWsAgJhIbogyBg== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz#7a73f824eb9f10d442b4d7f120fecb9b38ebf8b7" + integrity sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q== dependencies: micromark-util-symbol "^1.0.0" micromark-util-resolve-all@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-resolve-all/-/micromark-util-resolve-all-1.0.0.tgz#a7c363f49a0162e931960c44f3127ab58f031d88" - integrity sha512-CB/AGk98u50k42kvgaMM94wzBqozSzDDaonKU7P7jwQIuH2RU0TeBqGYJz2WY1UdihhjweivStrJ2JdkdEmcfw== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz#4652a591ee8c8fa06714c9b54cd6c8e693671188" + integrity sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA== dependencies: 
micromark-util-types "^1.0.0" micromark-util-sanitize-uri@^1.0.0, micromark-util-sanitize-uri@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.1.0.tgz#f12e07a85106b902645e0364feb07cf253a85aee" - integrity sha512-RoxtuSCX6sUNtxhbmsEFQfWzs8VN7cTctmBPvYivo98xb/kDEoTCtJQX5wyzIYEmk/lvNFTat4hL8oW0KndFpg== + version "1.2.0" + resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz#613f738e4400c6eedbc53590c67b197e30d7f90d" + integrity sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A== dependencies: micromark-util-character "^1.0.0" micromark-util-encode "^1.0.0" micromark-util-symbol "^1.0.0" micromark-util-subtokenize@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/micromark-util-subtokenize/-/micromark-util-subtokenize-1.0.2.tgz#ff6f1af6ac836f8bfdbf9b02f40431760ad89105" - integrity sha512-d90uqCnXp/cy4G881Ub4psE57Sf8YD0pim9QdjCRNjfas2M1u6Lbt+XZK9gnHL2XFhnozZiEdCa9CNfXSfQ6xA== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz#941c74f93a93eaf687b9054aeb94642b0e92edb1" + integrity sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A== dependencies: micromark-util-chunked "^1.0.0" micromark-util-symbol "^1.0.0" @@ -6678,19 +6729,19 @@ micromark-util-subtokenize@^1.0.0: uvu "^0.5.0" micromark-util-symbol@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-1.0.1.tgz#b90344db62042ce454f351cf0bebcc0a6da4920e" - integrity sha512-oKDEMK2u5qqAptasDAwWDXq0tG9AssVwAx3E9bBF3t/shRIGsWIRG+cGafs2p/SnDSOecnt6hZPCE2o6lHfFmQ== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz#813cd17837bdb912d069a12ebe3a44b6f7063142" + integrity sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag== micromark-util-types@^1.0.0, micromark-util-types@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-1.0.2.tgz#f4220fdb319205812f99c40f8c87a9be83eded20" - integrity sha512-DCfg/T8fcrhrRKTPjRrw/5LLvdGV7BHySf/1LOZx7TzWZdYRjogNtyNq885z3nNallwr3QUKARjqvHqX1/7t+w== + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-1.1.0.tgz#e6676a8cae0bb86a2171c498167971886cb7e283" + integrity sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg== micromark@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/micromark/-/micromark-3.1.0.tgz#eeba0fe0ac1c9aaef675157b52c166f125e89f62" - integrity sha512-6Mj0yHLdUZjHnOPgr5xfWIMqMWS12zDN6iws9SLuSz76W8jTtAv24MN4/CL7gJrl5vtxGInkkqDv/JIoRsQOvA== + version "3.2.0" + resolved "https://registry.yarnpkg.com/micromark/-/micromark-3.2.0.tgz#1af9fef3f995ea1ea4ac9c7e2f19c48fd5c006e9" + integrity sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA== dependencies: "@types/debug" "^4.0.0" debug "^4.0.0" @@ -6747,11 +6798,6 @@ mime@1.6.0: resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mime@^2.3.1: - version "2.5.2" - resolved 
"https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe" - integrity sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg== - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -6767,18 +6813,10 @@ mimic-response@^3.1.0: resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== -mini-create-react-context@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz#072171561bfdc922da08a60c2197a497cc2d1d5e" - integrity sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ== - dependencies: - "@babel/runtime" "^7.12.1" - tiny-warning "^1.0.3" - mini-css-extract-plugin@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz#9a1251d15f2035c342d99a468ab9da7a0451b71e" - integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg== + version "2.7.6" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.7.6.tgz#282a3d38863fddcd2e0c220aaed5b90bc156564d" + integrity sha512-Qk7HcgaPkGG6eD77mLvZS1nmxlao3j+9PkrT9Uc7HAE1id3F41+DdBRYRYkbyfNRGzm8/YWtzhw7nVPmwhqTQw== dependencies: schema-utils "^4.0.0" @@ -6795,9 +6833,9 @@ minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1: brace-expansion "^1.1.7" minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + version "1.2.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: version "0.5.3" @@ -6809,6 +6847,11 @@ mri@^1.1.0: resolved "https://registry.yarnpkg.com/mri/-/mri-1.2.0.tgz#6721480fec2a11a4889861115a48b6cbe7cc8f0b" integrity sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA== +mrmime@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-1.0.1.tgz#5f90c825fad4bdd41dc914eff5d1a8cfdaf24f27" + integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== + ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -6832,10 +6875,10 @@ multicast-dns@^7.2.5: dns-packet "^5.2.2" thunky "^1.0.2" -nanoid@^3.3.4: - version "3.3.4" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" - integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== +nanoid@^3.3.6: + version "3.3.6" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" + integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== napi-build-utils@^1.0.1: version 
"1.0.2" @@ -6861,16 +6904,16 @@ no-case@^3.0.4: tslib "^2.0.3" node-abi@^3.3.0: - version "3.24.0" - resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.24.0.tgz#b9d03393a49f2c7e147d0c99f180e680c27c1599" - integrity sha512-YPG3Co0luSu6GwOBsmIdGW6Wx0NyNDLg/hriIyDllVsNwnI6UeqaWShxC3lbH4LtEQUgoLP3XR1ndXiDAWvmRw== + version "3.47.0" + resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.47.0.tgz#6cbfa2916805ae25c2b7156ca640131632eb05e8" + integrity sha512-2s6B2CWZM//kPgwnuI0KrYwNjfdByE25zvAaEpq9IH4zcNsarH8Ihu/UuX6XMPEogDAxkuUFeZn60pXNHAqn3A== dependencies: semver "^7.3.5" node-addon-api@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.0.0.tgz#7d7e6f9ef89043befdb20c1989c905ebde18c501" - integrity sha512-CvkDw2OEnme7ybCykJpVcKH+uAOLV2qLqiyla128dN9TkEWfrYmxG6C2boDe5KcNQqZF3orkqzGgOMvZ/JNekA== + version "5.1.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.1.0.tgz#49da1ca055e109a23d537e9de43c09cca21eb762" + integrity sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA== node-emoji@^1.10.0: version "1.11.0" @@ -6884,10 +6927,10 @@ node-fetch@2.6.1: resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== -node-fetch@2.6.7, node-fetch@^2.6.1: - version "2.6.7" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== +node-fetch@^2.6.1, node-fetch@^2.6.12, node-fetch@^2.6.7: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== dependencies: whatwg-url "^5.0.0" @@ -6896,10 +6939,10 @@ node-forge@^1: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== -node-releases@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" - integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== +node-releases@^2.0.13: + version "2.0.13" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d" + integrity sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ== normalize-path@^2.1.1: version "2.1.1" @@ -6940,7 +6983,7 @@ nprogress@^0.2.0: resolved "https://registry.yarnpkg.com/nprogress/-/nprogress-0.2.0.tgz#cb8f34c53213d895723fcbab907e9422adbcafb1" integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA== -nth-check@^2.0.0, nth-check@^2.0.1: +nth-check@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== @@ -6953,9 +6996,9 @@ object-assign@^4.1.0, object-assign@^4.1.1: integrity 
sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== object-inspect@^1.9.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.11.0.tgz#9dceb146cedd4148a0d9e51ab88d34cf509922b1" - integrity sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg== + version "1.12.3" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" + integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== object-keys@^1.1.1: version "1.1.1" @@ -7004,9 +7047,9 @@ onetime@^5.1.2: mimic-fn "^2.1.0" open@^8.0.9, open@^8.4.0: - version "8.4.0" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" - integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + version "8.4.2" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== dependencies: define-lazy-prop "^2.0.0" is-docker "^2.1.1" @@ -7187,7 +7230,7 @@ path-key@^3.0.0, path-key@^3.1.0: resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== -path-parse@^1.0.6: +path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== @@ -7219,7 +7262,7 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== @@ -7246,12 +7289,12 @@ postcss-calc@^8.2.3: postcss-selector-parser "^6.0.9" postcss-value-parser "^4.2.0" -postcss-colormin@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" - integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== +postcss-colormin@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.1.tgz#86c27c26ed6ba00d96c79e08f3ffb418d1d1988f" + integrity sha512-UsWQG0AqTFQmpBegeLLc1+c3jIqBNB0zlDGRWR+dQ3pRKJL1oeMzyqmH3o2PIfn9MBdNrVPWhDbT769LxCTLJQ== dependencies: - browserslist "^4.16.6" + browserslist "^4.21.4" caniuse-api "^3.0.0" colord "^2.9.1" postcss-value-parser "^4.2.0" @@ -7292,13 +7335,13 @@ postcss-discard-unused@^5.1.0: postcss-selector-parser "^6.0.5" postcss-loader@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-7.0.1.tgz#4c883cc0a1b2bfe2074377b7a74c1cd805684395" - integrity 
sha512-VRviFEyYlLjctSM93gAZtcJJ/iSkPZ79zWbN/1fSH+NisBByEiVLqpdVDrPLVSi8DX0oJo12kL/GppTBdKVXiQ== + version "7.3.3" + resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-7.3.3.tgz#6da03e71a918ef49df1bb4be4c80401df8e249dd" + integrity sha512-YgO/yhtevGO/vJePCQmTxiaEwER94LABZN0ZMT4A0vsak9TpO+RvKRs7EmJ8peIlB9xfXCsS7M8LjqncsUZ5HA== dependencies: - cosmiconfig "^7.0.0" - klona "^2.0.5" - semver "^7.3.7" + cosmiconfig "^8.2.0" + jiti "^1.18.2" + semver "^7.3.8" postcss-merge-idents@^5.1.1: version "5.1.1" @@ -7316,10 +7359,10 @@ postcss-merge-longhand@^5.1.7: postcss-value-parser "^4.2.0" stylehacks "^5.1.1" -postcss-merge-rules@^5.1.3: - version "5.1.3" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.3.tgz#8f97679e67cc8d08677a6519afca41edf2220894" - integrity sha512-LbLd7uFC00vpOuMvyZop8+vvhnfRGpp2S+IMQKeuOZZapPRY4SMq5ErjQeHbHsjCUgJkRNrlU+LmxsKIqPKQlA== +postcss-merge-rules@^5.1.4: + version "5.1.4" + resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.4.tgz#2f26fa5cacb75b1402e213789f6766ae5e40313c" + integrity sha512-0R2IuYpgU93y9lhVbO/OylTtKMVcHb67zjWIfCiKR9rWL3GUk1677LAqD/BcHizukdZEjT8Ru3oHRoAYoJy44g== dependencies: browserslist "^4.21.4" caniuse-api "^3.0.0" @@ -7363,10 +7406,10 @@ postcss-modules-extract-imports@^3.0.0: resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== -postcss-modules-local-by-default@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" - integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== +postcss-modules-local-by-default@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.3.tgz#b08eb4f083050708998ba2c6061b50c2870ca524" + integrity sha512-2/u2zraspoACtrbFRnTijMiQtb4GW4BvatjaG/bCjYQo8kLTdevCUlwuBHx2sCnSyrI3x3qj4ZK1j5LQBgzmwA== dependencies: icss-utils "^5.0.0" postcss-selector-parser "^6.0.2" @@ -7464,10 +7507,10 @@ postcss-reduce-idents@^5.2.0: dependencies: postcss-value-parser "^4.2.0" -postcss-reduce-initial@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.1.tgz#c18b7dfb88aee24b1f8e4936541c29adbd35224e" - integrity sha512-//jeDqWcHPuXGZLoolFrUXBDyuEGbr9S2rMo19bkTIjBQ4PqkaO+oI8wua5BOUxpfi97i3PCoInsiFIEBfkm9w== +postcss-reduce-initial@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.2.tgz#798cd77b3e033eae7105c18c9d371d989e1382d6" + integrity sha512-dE/y2XRaqAi6OvjzD22pjTUQ8eOfc6m/natGHgKFBK9DxFmIm69YmaRVQrGgFlEfc1HePIurY0TmDeROK05rIg== dependencies: browserslist "^4.21.4" caniuse-api "^3.0.0" @@ -7480,17 +7523,17 @@ postcss-reduce-transforms@^5.1.0: postcss-value-parser "^4.2.0" postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.9: - version "6.0.10" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" - integrity 
sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + version "6.0.13" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz#d05d8d76b1e8e173257ef9d60b706a8e5e99bf1b" + integrity sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ== dependencies: cssesc "^3.0.0" util-deprecate "^1.0.2" postcss-sort-media-queries@^4.2.1: - version "4.3.0" - resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-4.3.0.tgz#f48a77d6ce379e86676fc3f140cf1b10a06f6051" - integrity sha512-jAl8gJM2DvuIJiI9sL1CuiHtKM4s5aEIomkU8G3LFvbP+p8i7Sz8VV63uieTgoewGqKbi+hxBTiOKJlB35upCg== + version "4.4.1" + resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-4.4.1.tgz#04a5a78db3921eb78f28a1a781a2e68e65258128" + integrity sha512-QDESFzDDGKgpiIh4GYXsSy6sek2yAwQx1JASl5AxBtU1Lq2JfKBljIPNdil989NcSKRQX1ToiaKphImtBuhXWw== dependencies: sort-css-media-queries "2.1.0" @@ -7519,12 +7562,12 @@ postcss-zindex@^5.1.0: resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-5.1.0.tgz#4a5c7e5ff1050bd4c01d95b1847dfdcc58a496ff" integrity sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A== -postcss@^8.3.11, postcss@^8.4.13, postcss@^8.4.14, postcss@^8.4.7: - version "8.4.16" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.16.tgz#33a1d675fac39941f5f445db0de4db2b6e01d43c" - integrity sha512-ipHE1XBvKzm5xI7hiHCZJCSugxvsdq2mPnsq5+UF+VHCjiBvtDrlxJfMBToWaP9D5XlgNmcFGqoHmUn0EYEaRQ== +postcss@^8.3.11, postcss@^8.4.14, postcss@^8.4.17, postcss@^8.4.21: + version "8.4.29" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.29.tgz#33bc121cf3b3688d4ddef50be869b2a54185a1dd" + integrity sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw== dependencies: - nanoid "^3.3.4" + nanoid "^3.3.6" picocolors "^1.0.0" source-map-js "^1.0.2" @@ -7552,9 +7595,9 @@ prepend-http@^2.0.0: integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== prettier@^2.3.0: - version "2.4.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.4.1.tgz#671e11c89c14a4cfc876ce564106c4a6726c9f5c" - integrity sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA== + version "2.8.8" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== pretty-error@^4.0.0: version "4.0.0" @@ -7599,16 +7642,7 @@ prompts@^2.4.2: kleur "^3.0.3" sisteransi "^1.0.5" -prop-types@^15.0.0, prop-types@^15.7.2: - version "15.7.2" - resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" - integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== - dependencies: - loose-envify "^1.4.0" - object-assign "^4.1.1" - react-is "^16.8.1" - -prop-types@^15.6.2: +prop-types@^15.0.0, prop-types@^15.6.2, prop-types@^15.7.2: version "15.8.1" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== @@ -7651,9 +7685,9 @@ punycode@^1.3.2: integrity 
sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== punycode@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + version "2.3.0" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" + integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== pupa@^2.1.1: version "2.1.1" @@ -7667,10 +7701,15 @@ pure-color@^1.2.0: resolved "https://registry.yarnpkg.com/pure-color/-/pure-color-1.3.0.tgz#1fe064fb0ac851f0de61320a8bf796836422f33e" integrity sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA== -qs@6.10.3: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== +qrcode.react@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/qrcode.react/-/qrcode.react-3.1.0.tgz#5c91ddc0340f768316fbdb8fff2765134c2aecd8" + integrity sha512-oyF+Urr3oAMUG/OiOuONL3HXM+53wvuH3mtIWQrYmsXoAq0DkvZp2RYUWFSMFtbdOpuS++9v+WAkzNVkMlNW6Q== + +qs@6.11.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== dependencies: side-channel "^1.0.4" @@ -7714,52 +7753,51 @@ raw-body@2.5.1: unpipe "1.0.0" rc-align@^4.0.0: - version "4.0.13" - resolved "https://registry.yarnpkg.com/rc-align/-/rc-align-4.0.13.tgz#5aa1b7b9d20e63f18eb12550cac2eb8d5ef3fe4b" - integrity sha512-l/UwiJllPFVLL/bfDpm0W2ySb1heXeSELnmiMRjXiNp0sboO0z7DnjItXQKS8fNRp6CApzRT1+P6pNWZztbbnA== + version "4.0.15" + resolved "https://registry.yarnpkg.com/rc-align/-/rc-align-4.0.15.tgz#2bbd665cf85dfd0b0244c5a752b07565e9098577" + integrity sha512-wqJtVH60pka/nOX7/IspElA8gjPNQKIx/ZqJ6heATCkXpe1Zg4cPVrMD2vC96wjsFFL8WsmhPbx9tdMo1qqlIA== dependencies: "@babel/runtime" "^7.10.1" classnames "2.x" dom-align "^1.7.0" - lodash "^4.17.21" - rc-util "^5.3.0" + rc-util "^5.26.0" resize-observer-polyfill "^1.5.1" -rc-cascader@~3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/rc-cascader/-/rc-cascader-3.7.0.tgz#98134df578ce1cca22be8fb4319b04df4f3dca36" - integrity sha512-SFtGpwmYN7RaWEAGTS4Rkc62ZV/qmQGg/tajr/7mfIkleuu8ro9Hlk6J+aA0x1YS4zlaZBtTcSaXM01QMiEV/A== +rc-cascader@~3.14.0: + version "3.14.1" + resolved "https://registry.yarnpkg.com/rc-cascader/-/rc-cascader-3.14.1.tgz#495f00b8d047a54fa64df3102f4d6e4a664feaf2" + integrity sha512-fCsgjLIQqYZMhFj9UT+x2ZW4uobx7OP5yivcn6Xto5fuxHaldphsryzCeUVmreQOHEo0RP+032Ip9RDzrKVKJA== dependencies: "@babel/runtime" "^7.12.5" array-tree-filter "^2.1.0" classnames "^2.3.1" - rc-select "~14.1.0" + rc-select "~14.7.0" rc-tree "~5.7.0" - rc-util "^5.6.1" + rc-util "^5.35.0" -rc-checkbox@~2.3.0: - version "2.3.2" - resolved "https://registry.yarnpkg.com/rc-checkbox/-/rc-checkbox-2.3.2.tgz#f91b3678c7edb2baa8121c9483c664fa6f0aefc1" - integrity sha512-afVi1FYiGv1U0JlpNH/UaEXdh6WUJjcWokj/nUN2TgG80bfG+MDdbfHKlLcNNba94mbjy2/SXJ1HDgrOkXGAjg== +rc-checkbox@~3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/rc-checkbox/-/rc-checkbox-3.1.0.tgz#6be0d9d8de2cc96fb5e37f9036a1c3e360d0a42d" + 
integrity sha512-PAwpJFnBa3Ei+5pyqMMXdcKYKNBMS+TvSDiLdDnARnMJHC8ESxwPfm4Ao1gJiKtWLdmGfigascnCpwrHFgoOBQ== dependencies: "@babel/runtime" "^7.10.1" - classnames "^2.2.1" + classnames "^2.3.2" + rc-util "^5.25.2" -rc-collapse@~3.4.2: - version "3.4.2" - resolved "https://registry.yarnpkg.com/rc-collapse/-/rc-collapse-3.4.2.tgz#1310be7ad4cd0dcfc622c45f6c3b5ffdee403ad7" - integrity sha512-jpTwLgJzkhAgp2Wpi3xmbTbbYExg6fkptL67Uu5LCRVEj6wqmy0DHTjjeynsjOLsppHGHu41t1ELntZ0lEvS/Q== +rc-collapse@~3.7.0: + version "3.7.1" + resolved "https://registry.yarnpkg.com/rc-collapse/-/rc-collapse-3.7.1.tgz#bda1f7f80adccf3433c1c15d4d9f9ca09910c727" + integrity sha512-N/7ejyiTf3XElNJBBpxqnZBUuMsQWEOPjB2QkfNvZ/Ca54eAvJXuOD1EGbCWCk2m7v/MSxku7mRpdeaLOCd4Gg== dependencies: "@babel/runtime" "^7.10.1" classnames "2.x" rc-motion "^2.3.4" - rc-util "^5.2.1" - shallowequal "^1.1.0" + rc-util "^5.27.0" -rc-dialog@~9.0.0, rc-dialog@~9.0.2: - version "9.0.2" - resolved "https://registry.yarnpkg.com/rc-dialog/-/rc-dialog-9.0.2.tgz#aadfebdeba145f256c1fac9b9f509f893cdbb5b8" - integrity sha512-s3U+24xWUuB6Bn2Lk/Qt6rufy+uT+QvWkiFhNBcO9APLxcFFczWamaq7x9h8SCuhfc1nHcW4y8NbMsnAjNnWyg== +rc-dialog@~9.1.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/rc-dialog/-/rc-dialog-9.1.0.tgz#6bf6fcc0453503b7643e54a5a445e835e3850649" + integrity sha512-5ry+JABAWEbaKyYsmITtrJbZbJys8CtMyzV8Xn4LYuXMeUx5XVHNyJRoqLFE4AzBuXXzOWeaC49cg+XkxK6kHA== dependencies: "@babel/runtime" "^7.10.1" "@rc-component/portal" "^1.0.0-8" @@ -7767,294 +7805,284 @@ rc-dialog@~9.0.0, rc-dialog@~9.0.2: rc-motion "^2.3.0" rc-util "^5.21.0" -rc-drawer@~6.0.0: - version "6.0.3" - resolved "https://registry.yarnpkg.com/rc-drawer/-/rc-drawer-6.0.3.tgz#09993ecdf88ddd569d5a3341d907e3ab258096bb" - integrity sha512-u4RajgrnREKQH/21gB2JHZiA6ZECo0X0BbmDxAJEhKD9jUhlAbqMN5I9VWa4PSzi9ceLHUShqQcPAh2EJswffw== +rc-drawer@~6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/rc-drawer/-/rc-drawer-6.2.0.tgz#fddf4825b0fa9d60e317b996f70278d594d1f668" + integrity sha512-spPkZ3WvP0U0vy5dyzSwlUJ/+vLFtjP/cTwSwejhQRoDBaexSZHsBhELoCZcEggI7LQ7typmtG30lAue2HEhvA== dependencies: "@babel/runtime" "^7.10.1" - "@rc-component/portal" "^1.0.0-6" + "@rc-component/portal" "^1.1.1" classnames "^2.2.6" rc-motion "^2.6.1" rc-util "^5.21.2" -rc-dropdown@~4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/rc-dropdown/-/rc-dropdown-4.0.1.tgz#f65d9d3d89750241057db59d5a75e43cd4576b68" - integrity sha512-OdpXuOcme1rm45cR0Jzgfl1otzmU4vuBVb+etXM8vcaULGokAKVpKlw8p6xzspG7jGd/XxShvq+N3VNEfk/l5g== +rc-dropdown@~4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/rc-dropdown/-/rc-dropdown-4.1.0.tgz#418a68939631520de80d0865d02b440eeeb4168e" + integrity sha512-VZjMunpBdlVzYpEdJSaV7WM7O0jf8uyDjirxXLZRNZ+tAC+NzD3PXPEtliFwGzVwBBdCmGuSqiS9DWcOLxQ9tw== dependencies: "@babel/runtime" "^7.18.3" + "@rc-component/trigger" "^1.7.0" classnames "^2.2.6" - rc-trigger "^5.3.1" rc-util "^5.17.0" -rc-field-form@~1.27.0: - version "1.27.3" - resolved "https://registry.yarnpkg.com/rc-field-form/-/rc-field-form-1.27.3.tgz#e5262796b91c80848a42a3e7a669bf459f08d63d" - integrity sha512-HGqxHnmGQgkPApEcikV4qTg3BLPC82uB/cwBDftDt1pYaqitJfSl5TFTTUMKVEJVT5RqJ2Zi68ME1HmIMX2HAw== +rc-field-form@~1.36.0: + version "1.36.2" + resolved "https://registry.yarnpkg.com/rc-field-form/-/rc-field-form-1.36.2.tgz#0a4e75ab9849e3c2517b8b07c1f97ecd3e52db55" + integrity sha512-tCF/JjUsnxW80Gk4E4ZH74ONsaQMxVTRtui6XhQB8DJc4FHWLLa5pP8zwhxtPKC5NaO0QZ0Cv79JggDubn6n2g== dependencies: "@babel/runtime" "^7.18.0" 
async-validator "^4.1.0" - rc-util "^5.8.0" + rc-util "^5.32.2" -rc-image@~5.12.0: - version "5.12.1" - resolved "https://registry.yarnpkg.com/rc-image/-/rc-image-5.12.1.tgz#1560eda00ef9d33ebdb3c8c74ab134eb00f973d4" - integrity sha512-FMldR/ODwQmlFlhjR4c6hsOHmnn4s9CxmW7PR/9XCYE1XHlGJ5OkSWOtJruoaLjVwt2tQYDRnLANf/mKZ9ReUg== +rc-image@~7.1.0: + version "7.1.3" + resolved "https://registry.yarnpkg.com/rc-image/-/rc-image-7.1.3.tgz#0072547c7c0a70e6badfb4bee320806c5bf7427b" + integrity sha512-foMl1rcit1F0+vgxE5kf0c8TygQcHhILsOohQUL+JMUbzOo3OBFRcehJudYbqbCTArzCecS8nA1irUU9vvgQbg== dependencies: "@babel/runtime" "^7.11.2" "@rc-component/portal" "^1.0.2" classnames "^2.2.6" - rc-dialog "~9.0.0" - rc-util "^5.0.6" + rc-dialog "~9.1.0" + rc-motion "^2.6.2" + rc-util "^5.34.1" -rc-input-number@~7.4.0: - version "7.4.0" - resolved "https://registry.yarnpkg.com/rc-input-number/-/rc-input-number-7.4.0.tgz#b8b4ffa8bbc04198e79ce8b9611756d046d128ec" - integrity sha512-r/Oub/sPYbzqLNUOHnnc9sbCu78a81KX+RCbRwmpvB4W6nptUySbdWS5KHV4Hak5CAE1LAd+wWm5JjvZizG1FA== +rc-input-number@~8.0.2: + version "8.0.4" + resolved "https://registry.yarnpkg.com/rc-input-number/-/rc-input-number-8.0.4.tgz#d33cfe4126e10f4771fe11a40797222c76d6598f" + integrity sha512-TP+G5b7mZtbwXJ/YEZXF/OgbEZ6iqD4+RSuxZJ8VGKGXDcdt0FKIvpFoNQr/knspdFC4OxA0OfsWfFWfN4XSyA== dependencies: "@babel/runtime" "^7.10.1" "@rc-component/mini-decimal" "^1.0.1" classnames "^2.2.5" - rc-util "^5.23.0" + rc-input "~1.1.0" + rc-util "^5.28.0" -rc-input@~0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/rc-input/-/rc-input-0.1.4.tgz#45cb4ba209ae6cc835a2acb8629d4f8f0cb347e0" - integrity sha512-FqDdNz+fV2dKNgfXzcSLKvC+jEs1709t7nD+WdfjrdSaOcefpgc7BUJYadc3usaING+b7ediMTfKxuJBsEFbXA== +rc-input@~1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/rc-input/-/rc-input-1.1.1.tgz#af33b49272220f6d42852d21b22e84c2dc1a87e6" + integrity sha512-NTR1Z4em681L8/ewb2KR80RykSmN8I2mzqzJDCoUmTrV1BB9Hk5d7ha4TnfgdEPPL148N+603sW2LExSXk1IbA== dependencies: "@babel/runtime" "^7.11.1" classnames "^2.2.1" rc-util "^5.18.1" -rc-mentions@~1.13.1: - version "1.13.1" - resolved "https://registry.yarnpkg.com/rc-mentions/-/rc-mentions-1.13.1.tgz#c884b70e1505a197f1b32a7c6b39090db6992a72" - integrity sha512-FCkaWw6JQygtOz0+Vxz/M/NWqrWHB9LwqlY2RtcuFqWJNFK9njijOOzTSsBGANliGufVUzx/xuPHmZPBV0+Hgw== +rc-mentions@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/rc-mentions/-/rc-mentions-2.5.0.tgz#8b936e497e0deb922f40df46e42efc3f596ec207" + integrity sha512-rERXsbUTNVrb5T/iDC0ki/SRGWJnOVraDy6O25Us3FSpuUZ3uq2TPZB4fRk0Hss5kyiEPzz2sprhkI4b+F4jUw== dependencies: - "@babel/runtime" "^7.10.1" + "@babel/runtime" "^7.22.5" + "@rc-component/trigger" "^1.5.0" classnames "^2.2.6" - rc-menu "~9.8.0" - rc-textarea "^0.4.0" - rc-trigger "^5.0.4" + rc-input "~1.1.0" + rc-menu "~9.10.0" + rc-textarea "~1.3.0" rc-util "^5.22.5" -rc-menu@~9.8.0: - version "9.8.1" - resolved "https://registry.yarnpkg.com/rc-menu/-/rc-menu-9.8.1.tgz#988cd807f78d2f92eab686f6813f50b165f78405" - integrity sha512-179weouypfjWJSRvvoo/vPy+StojsMzK2XC5jRNhL1ryt/N/8wAFESte8K6jZJkNp9DHDLFTe+dCGmikKpiFuA== +rc-menu@~9.10.0: + version "9.10.0" + resolved "https://registry.yarnpkg.com/rc-menu/-/rc-menu-9.10.0.tgz#5e0982e26786d67c8ebdba50406b197884c749a7" + integrity sha512-g27kpXaAoJh/fkPZF65/d4V+w4DhDeqomBdPcGnkFAcJnEM4o21TnVccrBUoDedLKzC7wJRw1Q7VTqEsfEufmw== dependencies: "@babel/runtime" "^7.10.1" + "@rc-component/trigger" "^1.6.2" classnames "2.x" rc-motion "^2.4.3" - rc-overflow "^1.2.8" - rc-trigger 
"^5.1.2" - rc-util "^5.12.0" - shallowequal "^1.1.0" + rc-overflow "^1.3.1" + rc-util "^5.27.0" -rc-motion@^2.0.0, rc-motion@^2.0.1, rc-motion@^2.3.0, rc-motion@^2.3.4, rc-motion@^2.4.3, rc-motion@^2.4.4, rc-motion@^2.6.0, rc-motion@^2.6.1, rc-motion@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/rc-motion/-/rc-motion-2.6.2.tgz#3d31f97e41fb8e4f91a4a4189b6a98ac63342869" - integrity sha512-4w1FaX3dtV749P8GwfS4fYnFG4Rb9pxvCYPc/b2fw1cmlHJWNNgOFIz7ysiD+eOrzJSvnLJWlNQQncpNMXwwpg== +rc-motion@^2.0.0, rc-motion@^2.0.1, rc-motion@^2.3.0, rc-motion@^2.3.4, rc-motion@^2.4.3, rc-motion@^2.4.4, rc-motion@^2.6.0, rc-motion@^2.6.1, rc-motion@^2.6.2, rc-motion@^2.7.3: + version "2.8.0" + resolved "https://registry.yarnpkg.com/rc-motion/-/rc-motion-2.8.0.tgz#5a8231632d7f5304873661424f293d6ee389854b" + integrity sha512-9gWWzlPvx/IJANj+t+ArqLCQ43rCWYLpOUe6+WJSAGb+b+fqBcfx81qPhg6b+ewa6g3mGNDhkTpBrVrCC4gcXA== dependencies: "@babel/runtime" "^7.11.1" classnames "^2.2.1" rc-util "^5.21.0" -rc-notification@~5.0.0-alpha.9: - version "5.0.0-alpha.9" - resolved "https://registry.yarnpkg.com/rc-notification/-/rc-notification-5.0.0-alpha.9.tgz#e6fbf5cc786e508f022691a61a03c0473f5ca7b0" - integrity sha512-QPvq8VHe2M0SE5DHJf7ADWlvfWKnTsj5FVxcu39gdjX98kKmi+BHY1eTPAQkkdGqd6ZXv6xXHl8qKHyWhQcFPA== +rc-notification@~5.0.4: + version "5.0.5" + resolved "https://registry.yarnpkg.com/rc-notification/-/rc-notification-5.0.5.tgz#33a86864b7491749742cfaef0df0117a9b967926" + integrity sha512-uEz2jggourwv/rR0obe7RHEa63UchqX4k+e+Qt2c3LaY7U9Tc+L6ANhzgCKYSA/afm0ebjmNZHoB5Cv47xEOcA== dependencies: "@babel/runtime" "^7.10.1" classnames "2.x" rc-motion "^2.6.0" rc-util "^5.20.1" -rc-overflow@^1.0.0, rc-overflow@^1.2.8: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc-overflow/-/rc-overflow-1.2.8.tgz#40f140fabc244118543e627cdd1ef750d9481a88" - integrity sha512-QJ0UItckWPQ37ZL1dMEBAdY1dhfTXFL9k6oTTcyydVwoUNMnMqCGqnRNA98axSr/OeDKqR6DVFyi8eA5RQI/uQ== +rc-overflow@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/rc-overflow/-/rc-overflow-1.3.2.tgz#72ee49e85a1308d8d4e3bd53285dc1f3e0bcce2c" + integrity sha512-nsUm78jkYAoPygDAcGZeC2VwIg/IBGSodtOY3pMof4W3M9qRJgqaDYm03ZayHlde3I6ipliAxbN0RUcGf5KOzw== dependencies: "@babel/runtime" "^7.11.1" classnames "^2.2.1" rc-resize-observer "^1.0.0" - rc-util "^5.19.2" + rc-util "^5.37.0" -rc-pagination@~3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/rc-pagination/-/rc-pagination-3.2.0.tgz#4f2fdba9fdac0f48e5c9fb1141973818138af7e1" - integrity sha512-5tIXjB670WwwcAJzAqp2J+cOBS9W3cH/WU1EiYwXljuZ4vtZXKlY2Idq8FZrnYBz8KhN3vwPo9CoV/SJS6SL1w== +rc-pagination@~3.6.0: + version "3.6.1" + resolved "https://registry.yarnpkg.com/rc-pagination/-/rc-pagination-3.6.1.tgz#2db6678a57cd2f4f29d6c0416e282543af52d0df" + integrity sha512-R/sUnKKXx1Nm4kZfUKS3YKa7yEPF1ZkVB/AynQaHt+nMER7h9wPTfliDJFdYo+RM/nk2JD4Yc5QpUq8fIQHeug== dependencies: "@babel/runtime" "^7.10.1" classnames "^2.2.1" + rc-util "^5.32.2" -rc-picker@~3.1.1: - version "3.1.4" - resolved "https://registry.yarnpkg.com/rc-picker/-/rc-picker-3.1.4.tgz#4806dc36a78424abaee610830777c8e22a23c74c" - integrity sha512-4qANXNc3C02YENNQvun329zf9VLvSQ2W8RkKQRu8k1P+EtSGqe3klcAKCfz/1TuCiDvgRjJlzRmyZAkwvsbI8w== +rc-picker@~3.13.0: + version "3.13.1" + resolved "https://registry.yarnpkg.com/rc-picker/-/rc-picker-3.13.1.tgz#06adc7b1ccbcfa05ff1ac9aae0c696fb5546f20d" + integrity sha512-211SrinX5IXZ9FMMDUMyPLuGOdfftUtd8zj4lqudpFxlMdtgV5+hXUJMBKb26xmDsleOm5iySK6KIHgiaI+U4w== dependencies: "@babel/runtime" "^7.10.1" + 
"@rc-component/trigger" "^1.5.0" classnames "^2.2.1" - rc-trigger "^5.0.4" - rc-util "^5.4.0" - shallowequal "^1.1.0" + rc-util "^5.30.0" rc-progress@~3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/rc-progress/-/rc-progress-3.4.1.tgz#a9ffe099e88a4fc03afb09d8603162bf0760d743" - integrity sha512-eAFDHXlk8aWpoXl0llrenPMt9qKHQXphxcVsnKs0FHC6eCSk1ebJtyaVjJUzKe0233ogiLDeEFK1Uihz3s67hw== + version "3.4.2" + resolved "https://registry.yarnpkg.com/rc-progress/-/rc-progress-3.4.2.tgz#f8df9ee95e790490171ab6b31bf07303cdc79980" + integrity sha512-iAGhwWU+tsayP+Jkl9T4+6rHeQTG9kDz8JAHZk4XtQOcYN5fj9H34NXNEdRdZx94VUDHMqCb1yOIvi8eJRh67w== dependencies: "@babel/runtime" "^7.10.1" classnames "^2.2.6" rc-util "^5.16.1" -rc-rate@~2.9.0: - version "2.9.2" - resolved "https://registry.yarnpkg.com/rc-rate/-/rc-rate-2.9.2.tgz#4a58965d1ecf91896ebae01d458b59056df0b4ea" - integrity sha512-SaiZFyN8pe0Fgphv8t3+kidlej+cq/EALkAJAc3A0w0XcPaH2L1aggM8bhe1u6GAGuQNAoFvTLjw4qLPGRKV5g== +rc-rate@~2.12.0: + version "2.12.0" + resolved "https://registry.yarnpkg.com/rc-rate/-/rc-rate-2.12.0.tgz#0182deffed3b009cdcc61660da8746c39ed91ed5" + integrity sha512-g092v5iZCdVzbjdn28FzvWebK2IutoVoiTeqoLTj9WM7SjA/gOJIw5/JFZMRyJYYVe1jLAU2UhAfstIpCNRozg== dependencies: "@babel/runtime" "^7.10.1" classnames "^2.2.5" rc-util "^5.0.1" -rc-resize-observer@^1.0.0, rc-resize-observer@^1.1.0, rc-resize-observer@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/rc-resize-observer/-/rc-resize-observer-1.2.0.tgz#9f46052f81cdf03498be35144cb7c53fd282c4c7" - integrity sha512-6W+UzT3PyDM0wVCEHfoW3qTHPTvbdSgiA43buiy8PzmeMnfgnDeb9NjdimMXMl3/TcrvvWl5RRVdp+NqcR47pQ== +rc-resize-observer@^1.0.0, rc-resize-observer@^1.1.0, rc-resize-observer@^1.2.0, rc-resize-observer@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/rc-resize-observer/-/rc-resize-observer-1.3.1.tgz#b61b9f27048001243617b81f95e53d7d7d7a6a3d" + integrity sha512-iFUdt3NNhflbY3mwySv5CA1TC06zdJ+pfo0oc27xpf4PIOvfZwZGtD9Kz41wGYqC4SLio93RVAirSSpYlV/uYg== dependencies: - "@babel/runtime" "^7.10.1" + "@babel/runtime" "^7.20.7" classnames "^2.2.1" - rc-util "^5.15.0" + rc-util "^5.27.0" resize-observer-polyfill "^1.5.1" -rc-segmented@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/rc-segmented/-/rc-segmented-2.1.0.tgz#0e0afe646c1a0e44a0e18785f518c42633ec8efc" - integrity sha512-hUlonro+pYoZcwrH6Vm56B2ftLfQh046hrwif/VwLIw1j3zGt52p5mREBwmeVzXnSwgnagpOpfafspzs1asjGw== +rc-segmented@~2.2.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/rc-segmented/-/rc-segmented-2.2.2.tgz#a34f12ce6c0975fc3042ae7656bcd18e1744798e" + integrity sha512-Mq52M96QdHMsNdE/042ibT5vkcGcD5jxKp7HgPC2SRofpia99P5fkfHy1pEaajLMF/kj0+2Lkq1UZRvqzo9mSA== dependencies: "@babel/runtime" "^7.11.1" classnames "^2.2.1" rc-motion "^2.4.4" rc-util "^5.17.0" -rc-select@~14.1.0, rc-select@~14.1.13: - version "14.1.16" - resolved "https://registry.yarnpkg.com/rc-select/-/rc-select-14.1.16.tgz#0cc4b5a1fc551a2db7c96bc1ece0896317ecdd47" - integrity sha512-71XLHleuZmufpdV2vis5oituRkhg2WNvLpVMJBGWRar6WGAVOHXaY9DR5HvwWry3EGTn19BqnL6Xbybje6f8YA== +rc-select@~14.7.0, rc-select@~14.7.1: + version "14.7.4" + resolved "https://registry.yarnpkg.com/rc-select/-/rc-select-14.7.4.tgz#742d85861e83604237784f60e2ba9dabcde8eac9" + integrity sha512-qRUpvMVXFy6rdHe+qzHXAqyQAfhErC/oY8dcRtoRjoz0lz2Nx3J+lLL5AnEbjnwlS+/kQTJUZ/65WyCwWwcLwQ== dependencies: "@babel/runtime" "^7.10.1" + "@rc-component/trigger" "^1.5.0" classnames "2.x" rc-motion "^2.0.1" - rc-overflow "^1.0.0" - rc-trigger "^5.0.4" + 
rc-overflow "^1.3.1" rc-util "^5.16.1" - rc-virtual-list "^3.2.0" + rc-virtual-list "^3.5.2" -rc-slider@~10.0.0: - version "10.0.1" - resolved "https://registry.yarnpkg.com/rc-slider/-/rc-slider-10.0.1.tgz#7058c68ff1e1aa4e7c3536e5e10128bdbccb87f9" - integrity sha512-igTKF3zBet7oS/3yNiIlmU8KnZ45npmrmHlUUio8PNbIhzMcsh+oE/r2UD42Y6YD2D/s+kzCQkzQrPD6RY435Q== +rc-slider@~10.1.0: + version "10.1.1" + resolved "https://registry.yarnpkg.com/rc-slider/-/rc-slider-10.1.1.tgz#5e82036e60b61021aba3ea0e353744dd7c74e104" + integrity sha512-gn8oXazZISEhnmRinI89Z/JD/joAaM35jp+gDtIVSTD/JJMCCBqThqLk1SVJmvtfeiEF/kKaFY0+qt4SDHFUDw== dependencies: "@babel/runtime" "^7.10.1" classnames "^2.2.5" - rc-util "^5.18.1" - shallowequal "^1.1.0" + rc-util "^5.27.0" -rc-steps@~6.0.0-alpha.2: - version "6.0.0-alpha.2" - resolved "https://registry.yarnpkg.com/rc-steps/-/rc-steps-6.0.0-alpha.2.tgz#505e64177111becd911cef4d24f15477438c9e59" - integrity sha512-d/GPx7ATlPbtFeOVt5FB19W11OBCmRd7lLknt4aSoCI6ukwJqpEhWu2INN4pDOQqN04y3PDsWl1q9hnw+ZC5AA== +rc-steps@~6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/rc-steps/-/rc-steps-6.0.1.tgz#c2136cd0087733f6d509209a84a5c80dc29a274d" + integrity sha512-lKHL+Sny0SeHkQKKDJlAjV5oZ8DwCdS2hFhAkIjuQt1/pB81M0cA0ErVFdHq9+jmPmFw1vJB2F5NBzFXLJxV+g== dependencies: "@babel/runtime" "^7.16.7" classnames "^2.2.3" rc-util "^5.16.1" -rc-switch@~4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/rc-switch/-/rc-switch-4.0.0.tgz#55fbf99fc2d680791175037d379e170ba51fbe78" - integrity sha512-IfrYC99vN0gKaTyjQdqYuADU0eH00SAFHg3jOp8HrmUpJruhV1SohJzrCbPqPraZeX/6X/QKkdLfkdnUub05WA== +rc-switch@~4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/rc-switch/-/rc-switch-4.1.0.tgz#f37d81b4e0c5afd1274fd85367b17306bf25e7d7" + integrity sha512-TI8ufP2Az9oEbvyCeVE4+90PDSljGyuwix3fV58p7HV2o4wBnVToEyomJRVyTaZeqNPAp+vqeo4Wnj5u0ZZQBg== dependencies: - "@babel/runtime" "^7.10.1" + "@babel/runtime" "^7.21.0" classnames "^2.2.1" - rc-util "^5.0.1" + rc-util "^5.30.0" -rc-table@~7.26.0: - version "7.26.0" - resolved "https://registry.yarnpkg.com/rc-table/-/rc-table-7.26.0.tgz#9d517e7fa512e7571fdcc453eb1bf19edfac6fbc" - integrity sha512-0cD8e6S+DTGAt5nBZQIPFYEaIukn17sfa5uFL98faHlH/whZzD8ii3dbFL4wmUDEL4BLybhYop+QUfZJ4CPvNQ== +rc-table@~7.32.1: + version "7.32.3" + resolved "https://registry.yarnpkg.com/rc-table/-/rc-table-7.32.3.tgz#9773563dc206ff12b6f023b7223b7056908d6241" + integrity sha512-MqjrI/ibuGg7NEyFsux0dM5GK+3er1gTiZofAkifr2bHf/Sa1nUqXXFmSrYXSOjwpx0xyBnJ3GrHFCIqC/eOzw== dependencies: "@babel/runtime" "^7.10.1" + "@rc-component/context" "^1.3.0" classnames "^2.2.5" rc-resize-observer "^1.1.0" - rc-util "^5.22.5" - shallowequal "^1.1.0" + rc-util "^5.27.1" -rc-tabs@~12.4.2: - version "12.4.2" - resolved "https://registry.yarnpkg.com/rc-tabs/-/rc-tabs-12.4.2.tgz#487a1b3f8d8cf0bfc121224013dab00d4a8e0532" - integrity sha512-FFlGwuTjQUznWzJtyhmHc6KAp5lRQFxKUv9Aj1UtsOYe2e7WGmuzcrd+/LQchuPe0VjhaZPdGkmFGcqGqNO6ow== +rc-tabs@~12.9.0: + version "12.9.0" + resolved "https://registry.yarnpkg.com/rc-tabs/-/rc-tabs-12.9.0.tgz#6d9af43d8ad2c47be00c75bee92417a4842d29d2" + integrity sha512-2HnVowgMVrq0DfQtyu4mCd9E6pXlWNdM6VaDvOOHMsLYqPmpY+7zBqUC6YrrQ9xYXHciTS0e7TtjOHIvpVCHLQ== dependencies: "@babel/runtime" "^7.11.2" classnames "2.x" - rc-dropdown "~4.0.0" - rc-menu "~9.8.0" + rc-dropdown "~4.1.0" + rc-menu "~9.10.0" rc-motion "^2.6.2" rc-resize-observer "^1.0.0" rc-util "^5.16.0" -rc-textarea@^0.4.0, rc-textarea@~0.4.5: - version "0.4.7" - resolved 
"https://registry.yarnpkg.com/rc-textarea/-/rc-textarea-0.4.7.tgz#627f662d46f99e0059d1c1ebc8db40c65339fe90" - integrity sha512-IQPd1CDI3mnMlkFyzt2O4gQ2lxUsnBAeJEoZGJnkkXgORNqyM9qovdrCj9NzcRfpHgLdzaEbU3AmobNFGUznwQ== +rc-textarea@~1.3.0, rc-textarea@~1.3.3: + version "1.3.4" + resolved "https://registry.yarnpkg.com/rc-textarea/-/rc-textarea-1.3.4.tgz#e77baf2202ac8f7e34a50ec9e15dd1dcb1501455" + integrity sha512-wn0YjTpvcVolcfXa0HtzL+jgV2QcwtfB29RwNAKj8hMgZOju1V24M3TfEDjABeQEAQbUGbjMbISREOX/YSVKhg== dependencies: "@babel/runtime" "^7.10.1" classnames "^2.2.1" + rc-input "~1.1.0" rc-resize-observer "^1.0.0" - rc-util "^5.24.4" - shallowequal "^1.1.0" + rc-util "^5.27.0" -rc-tooltip@~5.2.0: - version "5.2.2" - resolved "https://registry.yarnpkg.com/rc-tooltip/-/rc-tooltip-5.2.2.tgz#e5cafa8ecebf78108936a0bcb93c150fa81ac93b" - integrity sha512-jtQzU/18S6EI3lhSGoDYhPqNpWajMtS5VV/ld1LwyfrDByQpYmw/LW6U7oFXXLukjfDHQ7Ju705A82PRNFWYhg== +rc-tooltip@~6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/rc-tooltip/-/rc-tooltip-6.0.1.tgz#6a5e33bd6c3f6afe8851ea90e7af43e5c26b3cc6" + integrity sha512-MdvPlsD1fDSxKp9+HjXrc/CxLmA/s11QYIh1R7aExxfodKP7CZA++DG1AjrW80F8IUdHYcR43HAm0Y2BYPelHA== dependencies: "@babel/runtime" "^7.11.2" + "@rc-component/trigger" "^1.0.4" classnames "^2.3.1" - rc-trigger "^5.0.0" -rc-tree-select@~5.5.4: - version "5.5.5" - resolved "https://registry.yarnpkg.com/rc-tree-select/-/rc-tree-select-5.5.5.tgz#d28b3b45da1e820cd21762ba0ee93c19429bb369" - integrity sha512-k2av7jF6tW9bIO4mQhaVdV4kJ1c54oxV3/hHVU+oD251Gb5JN+m1RbJFTMf1o0rAFqkvto33rxMdpafaGKQRJw== +rc-tree-select@~5.11.0: + version "5.11.2" + resolved "https://registry.yarnpkg.com/rc-tree-select/-/rc-tree-select-5.11.2.tgz#66b4a95843c6c1e04ba923a485e8b13def3ad6d0" + integrity sha512-ujRFO3pcjSg8R4ndXX2oiNcCu+RgO9ZPcd23CZy18Khm+nRsfWWS3Su7qB0iuoJgzAJ5LK7b6Dio0t7IQDGs9g== dependencies: "@babel/runtime" "^7.10.1" classnames "2.x" - rc-select "~14.1.0" + rc-select "~14.7.0" rc-tree "~5.7.0" rc-util "^5.16.1" -rc-tree@~5.7.0: - version "5.7.2" - resolved "https://registry.yarnpkg.com/rc-tree/-/rc-tree-5.7.2.tgz#270ea7d9e1b2e5e81cd3659eba2fbd022a4831f6" - integrity sha512-nmnL6qLnfwVckO5zoqKL2I9UhwDqzyCtjITQCkwhimyz1zfuFkG5ZPIXpzD/Guzso94qQA/QrMsvzic5W6QDjg== +rc-tree@~5.7.0, rc-tree@~5.7.6: + version "5.7.10" + resolved "https://registry.yarnpkg.com/rc-tree/-/rc-tree-5.7.10.tgz#3d66c2a81ffd24cbb8b816e7a747f626e57cb0fc" + integrity sha512-n4UkMQY3bzvJUNnbw6e3YI7sy2kE9c9vAYbSt94qAhcPKtMOThONNr1LIaFB/M5XeFYYrWVbvRVoT8k38eFuSQ== dependencies: "@babel/runtime" "^7.10.1" classnames "2.x" rc-motion "^2.0.1" rc-util "^5.16.1" - rc-virtual-list "^3.4.8" - -rc-trigger@^5.0.0, rc-trigger@^5.0.4, rc-trigger@^5.1.2, rc-trigger@^5.2.10, rc-trigger@^5.3.1, rc-trigger@^5.3.4: - version "5.3.4" - resolved "https://registry.yarnpkg.com/rc-trigger/-/rc-trigger-5.3.4.tgz#6b4b26e32825677c837d1eb4d7085035eecf9a61" - integrity sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw== - dependencies: - "@babel/runtime" "^7.18.3" - classnames "^2.2.6" - rc-align "^4.0.0" - rc-motion "^2.0.0" - rc-util "^5.19.2" + rc-virtual-list "^3.5.1" rc-upload@~4.3.0: version "4.3.4" @@ -8065,35 +8093,25 @@ rc-upload@~4.3.0: classnames "^2.2.5" rc-util "^5.2.0" -rc-util@^5.0.1, rc-util@^5.0.6, rc-util@^5.12.0, rc-util@^5.15.0, rc-util@^5.16.0, rc-util@^5.16.1, rc-util@^5.17.0, rc-util@^5.18.1, rc-util@^5.19.2, rc-util@^5.2.0, rc-util@^5.2.1, rc-util@^5.20.1, rc-util@^5.21.0, rc-util@^5.21.2, rc-util@^5.22.5, rc-util@^5.23.0, 
rc-util@^5.24.2, rc-util@^5.24.4, rc-util@^5.25.2, rc-util@^5.3.0, rc-util@^5.4.0, rc-util@^5.6.1, rc-util@^5.8.0: - version "5.25.3" - resolved "https://registry.yarnpkg.com/rc-util/-/rc-util-5.25.3.tgz#7f6a5895e4edc5acdf5f73e90e1c031f3b67257d" - integrity sha512-+M+44T6UdM4iOd4QXRQKQjitOY26vC5pgFPNSo0XsY9OWzpHvy77BI55eL9Q9oDMUHzVuRNzzUkK1RI2W3n+ZQ== - dependencies: - "@babel/runtime" "^7.18.3" - react-is "^16.12.0" - shallowequal "^1.1.0" - -rc-util@^5.9.4: - version "5.23.0" - resolved "https://registry.yarnpkg.com/rc-util/-/rc-util-5.23.0.tgz#a583b1ec3e1832a80eced7a700a494af0b590743" - integrity sha512-lgm6diJ/pLgyfoZY59Vz7sW4mSoQCgozqbBye9IJ7/mb5w5h4T7h+i2JpXAx/UBQxscBZe68q0sP7EW+qfkKUg== +rc-util@^5.0.1, rc-util@^5.16.0, rc-util@^5.16.1, rc-util@^5.17.0, rc-util@^5.18.1, rc-util@^5.2.0, rc-util@^5.20.1, rc-util@^5.21.0, rc-util@^5.21.2, rc-util@^5.22.5, rc-util@^5.24.4, rc-util@^5.25.2, rc-util@^5.26.0, rc-util@^5.27.0, rc-util@^5.27.1, rc-util@^5.28.0, rc-util@^5.30.0, rc-util@^5.31.1, rc-util@^5.32.2, rc-util@^5.33.0, rc-util@^5.34.1, rc-util@^5.35.0, rc-util@^5.36.0, rc-util@^5.37.0, rc-util@^5.9.4: + version "5.37.0" + resolved "https://registry.yarnpkg.com/rc-util/-/rc-util-5.37.0.tgz#6df9a55cb469b41b6995530a45b5f3dd3219a4ea" + integrity sha512-cPMV8DzaHI1KDaS7XPRXAf4J7mtBqjvjikLpQieaeOO7+cEbqY2j7Kso/T0R0OiEZTNcLS/8Zl9YrlXiO9UbjQ== dependencies: "@babel/runtime" "^7.18.3" react-is "^16.12.0" - shallowequal "^1.1.0" -rc-virtual-list@^3.2.0, rc-virtual-list@^3.4.8: - version "3.4.13" - resolved "https://registry.yarnpkg.com/rc-virtual-list/-/rc-virtual-list-3.4.13.tgz#20acc934b263abcf7b7c161f50ef82281b2f7e8d" - integrity sha512-cPOVDmcNM7rH6ANotanMDilW/55XnFPw0Jh/GQYtrzZSy3AmWvCnqVNyNC/pgg3lfVmX2994dlzAhuUrd4jG7w== +rc-virtual-list@^3.5.1, rc-virtual-list@^3.5.2: + version "3.10.5" + resolved "https://registry.yarnpkg.com/rc-virtual-list/-/rc-virtual-list-3.10.5.tgz#a203ca60bf3334e16193f641db1e99a48ae76574" + integrity sha512-Vc89TL3JHfRlLVQXVj5Hmv0dIflgwmHDcbjt9lrZjOG3wNUDkTF5zci8kFDU/CzdmmqgKu+CUktEpT10VUKYSQ== dependencies: "@babel/runtime" "^7.20.0" classnames "^2.2.6" rc-resize-observer "^1.0.0" - rc-util "^5.15.0" + rc-util "^5.36.0" -rc@^1.2.7, rc@^1.2.8: +rc@1.2.8, rc@^1.2.7, rc@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== @@ -8157,9 +8175,9 @@ react-error-overlay@^6.0.11: integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== react-fast-compare@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.0.tgz#641a9da81b6a6320f270e89724fb45a0b39e43bb" - integrity sha512-rtGImPZ0YyLrscKI9xTpV8psd6I8VAtjKCzQDlzyDvqJA8XOW78TXYQwNRNd8g8JZnDu8q9Fu/1v4HPAVwVdHA== + version "3.2.2" + resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.2.tgz#929a97a532304ce9fee4bcae44234f1ce2c21d49" + integrity sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ== react-helmet-async@*, react-helmet-async@^1.3.0: version "1.3.0" @@ -8172,7 +8190,7 @@ react-helmet-async@*, react-helmet-async@^1.3.0: react-fast-compare "^3.2.0" shallowequal "^1.1.0" -react-is@^16.12.0, react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1: +react-is@^16.12.0, react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0: version "16.13.1" resolved 
"https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== @@ -8252,28 +8270,27 @@ react-router-config@^5.1.1: "@babel/runtime" "^7.1.2" react-router-dom@^5.3.3: - version "5.3.3" - resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.3.tgz#8779fc28e6691d07afcaf98406d3812fe6f11199" - integrity sha512-Ov0tGPMBgqmbu5CDmN++tv2HQ9HlWDuWIIqn4b88gjlAN5IHI+4ZUZRcpz9Hl0azFIwihbLDYw1OiHGRo7ZIng== + version "5.3.4" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.4.tgz#2ed62ffd88cae6db134445f4a0c0ae8b91d2e5e6" + integrity sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ== dependencies: "@babel/runtime" "^7.12.13" history "^4.9.0" loose-envify "^1.3.1" prop-types "^15.6.2" - react-router "5.3.3" + react-router "5.3.4" tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-router@5.3.3, react-router@^5.3.3: - version "5.3.3" - resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.3.tgz#8e3841f4089e728cf82a429d92cdcaa5e4a3a288" - integrity sha512-mzQGUvS3bM84TnbtMYR8ZjKnuPJ71IjSzR+DE6UkUqvN4czWIqEs17yLL8xkAycv4ev0AiN+IGrWu88vJs/p2w== +react-router@5.3.4, react-router@^5.3.3: + version "5.3.4" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.4.tgz#8ca252d70fcc37841e31473c7a151cf777887bb5" + integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== dependencies: "@babel/runtime" "^7.12.13" history "^4.9.0" hoist-non-react-statics "^3.1.0" loose-envify "^1.3.1" - mini-create-react-context "^0.4.0" path-to-regexp "^1.7.0" prop-types "^15.6.2" react-is "^16.6.0" @@ -8290,11 +8307,11 @@ react-style-singleton@^2.2.1: tslib "^2.0.0" react-textarea-autosize@^8.3.2: - version "8.4.0" - resolved "https://registry.yarnpkg.com/react-textarea-autosize/-/react-textarea-autosize-8.4.0.tgz#4d0244d6a50caa897806b8c44abc0540a69bfc8c" - integrity sha512-YrTFaEHLgJsi8sJVYHBzYn+mkP3prGkmP2DKb/tm0t7CLJY5t1Rxix8070LAKb0wby7bl/lf2EeHkuMihMZMwQ== + version "8.5.3" + resolved "https://registry.yarnpkg.com/react-textarea-autosize/-/react-textarea-autosize-8.5.3.tgz#d1e9fe760178413891484847d3378706052dd409" + integrity sha512-XT1024o2pqCuZSuBt9FwHlaDeNtVrtCXu0Rnz88t1jUGheCLa3PhjE1GH8Ctm2axEtvdCl5SUHYschyQ0L5QHQ== dependencies: - "@babel/runtime" "^7.10.2" + "@babel/runtime" "^7.20.13" use-composed-ref "^1.3.0" use-latest "^1.2.1" @@ -8316,9 +8333,9 @@ react@^18.2.0: loose-envify "^1.1.0" readable-stream@^2.0.1: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + version "2.3.8" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" @@ -8329,9 +8346,9 @@ readable-stream@^2.0.1: util-deprecate "~1.0.1" readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" - integrity 
sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
+  version "3.6.2"
+  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
+  integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==
   dependencies:
     inherits "^2.0.3"
     string_decoder "^1.1.1"

@@ -8375,41 +8392,36 @@ regenerate@^1.4.2:
   resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a"
   integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==

-regenerator-runtime@^0.13.11:
-  version "0.13.11"
-  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9"
-  integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==
-
-regenerator-runtime@^0.13.4:
-  version "0.13.9"
-  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
-  integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==
+regenerator-runtime@^0.14.0:
+  version "0.14.0"
+  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45"
+  integrity sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==

-regenerator-transform@^0.15.1:
-  version "0.15.1"
-  resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.1.tgz#f6c4e99fc1b4591f780db2586328e4d9a9d8dc56"
-  integrity sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg==
+regenerator-transform@^0.15.2:
+  version "0.15.2"
+  resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.2.tgz#5bbae58b522098ebdf09bca2f83838929001c7a4"
+  integrity sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==
   dependencies:
     "@babel/runtime" "^7.8.4"

-regexpu-core@^5.2.1:
-  version "5.2.2"
-  resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.2.2.tgz#3e4e5d12103b64748711c3aad69934d7718e75fc"
-  integrity sha512-T0+1Zp2wjF/juXMrMxHxidqGYn8U4R+zleSJhX9tQ1PUsS8a9UtYfbsF9LdiVgNX3kiX8RNaKM42nfSgvFJjmw==
+regexpu-core@^5.3.1:
+  version "5.3.2"
+  resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b"
+  integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==
   dependencies:
+    "@babel/regjsgen" "^0.8.0"
     regenerate "^1.4.2"
     regenerate-unicode-properties "^10.1.0"
-    regjsgen "^0.7.1"
     regjsparser "^0.9.1"
     unicode-match-property-ecmascript "^2.0.0"
     unicode-match-property-value-ecmascript "^2.1.0"

 registry-auth-token@^4.0.0:
-  version "4.2.1"
-  resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.1.tgz#6d7b4006441918972ccd5fedcd41dc322c79b250"
-  integrity sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==
+  version "4.2.2"
+  resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.2.tgz#f02d49c3668884612ca031419491a13539e21fac"
+  integrity sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg==
   dependencies:
-    rc "^1.2.8"
+    rc "1.2.8"

 registry-url@^5.0.0:
   version "5.1.0"
@@ -8418,11 +8430,6 @@ registry-url@^5.0.0:
   dependencies:
     rc "^1.2.8"

-regjsgen@^0.7.1:
-  version "0.7.1"
-  resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.7.1.tgz#ee5ef30e18d3f09b7c369b76e7c2373ed25546f6"
-  integrity sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA==
-
 regjsparser@^0.9.1:
   version "0.9.1"
   resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709"
@@ -8496,9 +8503,9 @@ remark-parse@8.0.3:
   xtend "^4.0.1"

 remark-parse@^10.0.0:
-  version "10.0.1"
-  resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-10.0.1.tgz#6f60ae53edbf0cf38ea223fe643db64d112e0775"
-  integrity sha512-1fUyHr2jLsVOkhbvPRBJ5zTKZZyD6yZzYaWCS6BPBdQ8vEMBCH+9zNCDA6tET/zHCi/jLqjCWtlJZUPk+DbnFw==
+  version "10.0.2"
+  resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-10.0.2.tgz#ca241fde8751c2158933f031a4e3efbaeb8bc262"
+  integrity sha512-3ydxgHa/ZQzG8LvC7jTXccARYDcRld3VfcgIIFs7bI6vbRSxJJmzgLEIIoYKyrfhaY+ujuWaf/PJiMZXoiCXgw==
   dependencies:
     "@types/mdast" "^3.0.0"
     mdast-util-from-markdown "^1.0.0"
@@ -8578,12 +8585,13 @@ resolve-pathname@^3.0.0:
   integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==

 resolve@^1.1.6, resolve@^1.14.2, resolve@^1.3.2:
-  version "1.20.0"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975"
-  integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==
+  version "1.22.4"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.4.tgz#1dc40df46554cdaf8948a486a10f6ba1e2026c34"
+  integrity sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg==
   dependencies:
-    is-core-module "^2.2.0"
-    path-parse "^1.0.6"
+    is-core-module "^2.13.0"
+    path-parse "^1.0.7"
+    supports-preserve-symlinks-flag "^1.0.0"

 responselike@^1.0.2:
   version "1.0.2"
@@ -8632,9 +8640,9 @@ run-parallel@^1.1.9:
     queue-microtask "^1.2.2"

 rxjs@^7.5.4:
-  version "7.5.6"
-  resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.6.tgz#0446577557862afd6903517ce7cae79ecb9662bc"
-  integrity sha512-dnyv2/YsXhnm461G+R/Pe5bWP41Nm6LBXEYWI6eiFP4fiwx6WRI/CD0zbdVAudd9xwLEF2IDcKXLHit0FYjUzw==
+  version "7.8.1"
+  resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543"
+  integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==
   dependencies:
     tslib "^2.1.0"

@@ -8661,9 +8669,9 @@ safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0,
   integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==

 sass-loader@^10.1.1:
-  version "10.2.0"
-  resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-10.2.0.tgz#3d64c1590f911013b3fa48a0b22a83d5e1494716"
-  integrity sha512-kUceLzC1gIHz0zNJPpqRsJyisWatGYNFRmv2CKZK2/ngMJgLqxTbXwe/hJ85luyvZkgqU3VlJ33UVF2T/0g6mw==
+  version "10.4.1"
+  resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-10.4.1.tgz#bea4e173ddf512c9d7f53e9ec686186146807cbf"
+  integrity sha512-aX/iJZTTpNUNx/OSYzo2KsjIUQHqvWsAhhUijFjAPdZTEhstjZI9zTNvkTTwsx+uNUJqUwOw5gacxQMx4hJxGQ==
   dependencies:
     klona "^2.0.4"
     loader-utils "^2.0.0"
@@ -8672,11 +8680,13 @@ sass-loader@^10.1.1:
     semver "^7.3.2"

 sass@^1.43.2:
-  version "1.43.3"
-  resolved "https://registry.yarnpkg.com/sass/-/sass-1.43.3.tgz#aa16a69131b84f0cd23189a242571e8905f1ce43"
-  integrity sha512-BJnLngqWpMeS65UvlYYEuCb3/fLxDxhHtOB/gWPxs6NKrslTxGt3ZxwIvOe/0Jm4tWwM/+tIpE3wj4dLEhPDeQ==
+  version "1.66.1"
+  resolved "https://registry.yarnpkg.com/sass/-/sass-1.66.1.tgz#04b51c4671e4650aa393740e66a4e58b44d055b1"
+  integrity sha512-50c+zTsZOJVgFfTgwwEzkjA3/QACgdNsKueWPyAR0mRINIvLAStVQBbPg14iuqEQ74NPDbXzJARJ/O4SI1zftA==
   dependencies:
     chokidar ">=3.0.0 <4.0.0"
+    immutable "^4.0.0"
+    source-map-js ">=0.6.2 <2.0.0"

 sax@^1.2.4:
   version "1.2.4"
@@ -8708,31 +8718,31 @@ schema-utils@^2.6.5:
     ajv "^6.12.4"
     ajv-keywords "^3.5.2"

-schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281"
-  integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==
+schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0:
+  version "3.3.0"
+  resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe"
+  integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==
   dependencies:
     "@types/json-schema" "^7.0.8"
     ajv "^6.12.5"
     ajv-keywords "^3.5.2"

 schema-utils@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7"
-  integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b"
+  integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw==
   dependencies:
     "@types/json-schema" "^7.0.9"
-    ajv "^8.8.0"
+    ajv "^8.9.0"
     ajv-formats "^2.1.1"
-    ajv-keywords "^5.0.0"
+    ajv-keywords "^5.1.0"

 scroll-into-view-if-needed@^3.0.3:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.0.3.tgz#57256bef78f3c3c288070d2aaa63cf547aa11e70"
-  integrity sha512-QoCH0lVw0tbA7Rl6sToH7e1tO3n95Oi6JgBgC8hEpNNZUC91MfasJ/4E1ZdbzGueNDZ+Y7ObfRaelKUgTyPbJA==
+  version "3.0.10"
+  resolved "https://registry.yarnpkg.com/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.0.10.tgz#38fbfe770d490baff0fb2ba34ae3539f6ec44e13"
+  integrity sha512-t44QCeDKAPf1mtQH3fYpWz8IM/DyvHLjs8wUvvwMYxk5moOqCzrMSxK6HQVD0QVmVjXFavoFIPRVrMuJPKAvtg==
   dependencies:
-    compute-scroll-into-view "^2.0.2"
+    compute-scroll-into-view "^3.0.2"

 section-matter@^1.0.0:
   version "1.0.0"
@@ -8747,10 +8757,10 @@ select-hose@^2.0.0:
   resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
   integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==

-selfsigned@^2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.0.1.tgz#8b2df7fa56bf014d19b6007655fff209c0ef0a56"
-  integrity sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ==
+selfsigned@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61"
+  integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ==
   dependencies:
     node-forge "^1"

@@ -8762,19 +8772,19 @@ semver-diff@^3.1.1:
     semver "^6.3.0"

 semver@^5.4.1:
-  version "5.7.1"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
-  integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
+  version "5.7.2"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
+  integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==

-semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0:
-  version "6.3.0"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
-  integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
+semver@^6.0.0, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1:
+  version "6.3.1"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
+  integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==

-semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7:
-  version "7.3.7"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f"
-  integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==
+semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8:
+  version "7.5.4"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e"
+  integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==
   dependencies:
     lru-cache "^6.0.0"

@@ -8797,10 +8807,10 @@ send@0.18.0:
     range-parser "~1.2.1"
     statuses "2.0.1"

-serialize-javascript@^6.0.0:
-  version "6.0.0"
-  resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8"
-  integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==
+serialize-javascript@^6.0.0, serialize-javascript@^6.0.1:
+  version "6.0.1"
+  resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.1.tgz#b206efb27c3da0b0ab6b52f48d170b7996458e5c"
+  integrity sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==
   dependencies:
     randombytes "^2.1.0"

@@ -8895,9 +8905,9 @@ shebang-regex@^3.0.0:
   integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==

 shell-quote@^1.7.3:
-  version "1.7.3"
-  resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123"
-  integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==
+  version "1.8.1"
+  resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680"
+  integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==

 shelljs@^0.8.5:
   version "0.8.5"
@@ -8918,9 +8928,9 @@ side-channel@^1.0.4:
     object-inspect "^1.9.0"

 signal-exit@^3.0.2, signal-exit@^3.0.3:
-  version "3.0.5"
-  resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.5.tgz#9e3e8cc0c75a99472b44321033a7702e7738252f"
-  integrity sha512-KWcOiKeQj6ZyXx7zq4YxSMgHRlod4czeBQZrPb8OKcohcqAXShm7E20kEMle9WBt26hFcAf0qLOcp5zmY7kOqQ==
+  version "3.0.7"
+  resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
+  integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==

 simple-concat@^1.0.0:
   version "1.0.1"
@@ -8943,14 +8953,14 @@ simple-swizzle@^0.2.2:
   dependencies:
     is-arrayish "^0.3.1"

-sirv@^1.0.7:
-  version "1.0.18"
-  resolved "https://registry.yarnpkg.com/sirv/-/sirv-1.0.18.tgz#105fab52fb656ce8a2bebbf36b11052005952899"
-  integrity sha512-f2AOPogZmXgJ9Ma2M22ZEhc1dNtRIzcEkiflMFeVTRq+OViOZMvH1IPMVOwrKaxpSaHioBJiDR0SluRqGa7atA==
+sirv@^2.0.3:
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/sirv/-/sirv-2.0.3.tgz#ca5868b87205a74bef62a469ed0296abceccd446"
+  integrity sha512-O9jm9BsID1P+0HOi81VpXPoDxYP374pkOLzACAoyUQ/3OUVndNpsz6wMnY2z+yOxzbllCKZrM+9QrWsv4THnyA==
   dependencies:
     "@polka/url" "^1.0.0-next.20"
-    mime "^2.3.1"
-    totalist "^1.0.0"
+    mrmime "^1.0.0"
+    totalist "^3.0.0"

 sisteransi@^1.0.5:
   version "1.0.5"
@@ -8991,12 +9001,12 @@ sort-css-media-queries@2.1.0:
   resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-2.1.0.tgz#7c85e06f79826baabb232f5560e9745d7a78c4ce"
   integrity sha512-IeWvo8NkNiY2vVYdPa27MCQiR0MN0M80johAYFVxWWXQ44KU84WNxjslwBHmc/7ZL2ccwkM7/e6S5aiKZXm7jA==

-source-map-js@^1.0.2:
+"source-map-js@>=0.6.2 <2.0.0", source-map-js@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
   integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==

-source-map-support@^0.5.17, source-map-support@~0.5.20:
+source-map-support@~0.5.20:
   version "0.5.21"
   resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
   integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
@@ -9073,9 +9083,9 @@ statuses@2.0.1:
   integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==

 std-env@^3.0.1:
-  version "3.2.1"
-  resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.2.1.tgz#00e260ec3901333537125f81282b9296b00d7304"
-  integrity sha512-D/uYFWkI/31OrnKmXZqGAGK5GbQRPp/BWA1nuITcc6ICblhhuQUPHS5E2GSCVS7Hwhf4ciq8qsATwBUxv+lI6w==
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.4.3.tgz#326f11db518db751c83fd58574f449b7c3060910"
+  integrity sha512-f9aPhy8fYBuMN+sNfakZV18U39PbalgjXG3lLB9WkaYTxijru61wb57V9wxxNthXM5Sd88ETBWi29qLAsHO52Q==

 string-convert@^0.2.0:
   version "0.2.1"
@@ -9131,9 +9141,9 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1:
     ansi-regex "^5.0.1"

 strip-ansi@^7.0.1:
-  version "7.0.1"
-  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2"
-  integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==
+  version "7.1.0"
+  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45"
+  integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==
   dependencies:
     ansi-regex "^6.0.1"

@@ -9165,9 +9175,9 @@ style-to-object@0.3.0, style-to-object@^0.3.0:
   inline-style-parser "0.1.1"

 style-to-object@^0.4.0:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.4.1.tgz#53cf856f7cf7f172d72939d9679556469ba5de37"
-  integrity sha512-HFpbb5gr2ypci7Qw+IOhnP2zOU7e77b+rzM+wTzXzfi1PrtBCX0E7Pk4wL4iTLnhzZ+JgEGAhX81ebTg/aYjQw==
+  version "0.4.2"
+  resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.4.2.tgz#a8247057111dea8bd3b8a1a66d2d0c9cf9218a54"
+  integrity sha512-1JGpfPB3lo42ZX8cuPrheZbfQ6kqPPnPHlKMyeRYtfKD+0jG+QsXgXN57O/dvJlzlB2elI6dGmrPnl5VPQFPaA==
   dependencies:
     inline-style-parser "0.1.1"

@@ -9180,9 +9190,9 @@ stylehacks@^5.1.1:
     postcss-selector-parser "^6.0.4"

 stylis@^4.0.13:
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.1.3.tgz#fd2fbe79f5fed17c55269e16ed8da14c84d069f7"
-  integrity sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA==
+  version "4.3.0"
+  resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.3.0.tgz#abe305a669fc3d8777e10eefcfc73ad861c5588c"
+  integrity sha512-E87pIogpwUsUwXw7dNyU4QDjdgVMy52m+XEOPEKUn161cCzWjjhPSQhByfd1CcNvrOLnXQ6OnnZDwnJrz/Z4YQ==

 subscriptions-transport-ws@^0.9.18:
   version "0.9.19"
@@ -9216,6 +9226,11 @@ supports-color@^8.0.0:
   dependencies:
     has-flag "^4.0.0"

+supports-preserve-symlinks-flag@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
+  integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
+
 svg-parser@^2.0.4:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5"
@@ -9278,24 +9293,24 @@ tar-stream@^2.1.4:
     inherits "^2.0.3"
     readable-stream "^3.1.1"

-terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.3.3:
-  version "5.3.6"
-  resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz#5590aec31aa3c6f771ce1b1acca60639eab3195c"
-  integrity sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ==
+terser-webpack-plugin@^5.3.3, terser-webpack-plugin@^5.3.7:
+  version "5.3.9"
+  resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1"
+  integrity sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA==
   dependencies:
-    "@jridgewell/trace-mapping" "^0.3.14"
+    "@jridgewell/trace-mapping" "^0.3.17"
     jest-worker "^27.4.5"
     schema-utils "^3.1.1"
-    serialize-javascript "^6.0.0"
-    terser "^5.14.1"
+    serialize-javascript "^6.0.1"
+    terser "^5.16.8"

-terser@^5.10.0, terser@^5.14.1:
-  version "5.15.0"
-  resolved "https://registry.yarnpkg.com/terser/-/terser-5.15.0.tgz#e16967894eeba6e1091509ec83f0c60e179f2425"
-  integrity sha512-L1BJiXVmheAQQy+as0oF3Pwtlo4s3Wi1X2zNZ2NxOB4wx9bdS9Vk67XQENLFdLYGCK/Z2di53mTj/hBafR+dTA==
+terser@^5.10.0, terser@^5.16.8:
+  version "5.19.3"
+  resolved "https://registry.yarnpkg.com/terser/-/terser-5.19.3.tgz#359baeba615aef13db4b8c4d77a2aa0d8814aa9e"
+  integrity sha512-pQzJ9UJzM0IgmT4FAtYI6+VqFf0lj/to58AV0Xfgg0Up37RyPG7Al+1cepC6/BVuAxR9oNb41/DL4DEoHJvTdg==
   dependencies:
-    "@jridgewell/source-map" "^0.3.2"
-    acorn "^8.5.0"
+    "@jridgewell/source-map" "^0.3.3"
+    acorn "^8.8.2"
     commander "^2.20.0"
     source-map-support "~0.5.20"

@@ -9304,17 +9319,22 @@ text-table@^0.2.0:
   resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
   integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==

+throttle-debounce@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-5.0.0.tgz#a17a4039e82a2ed38a5e7268e4132d6960d41933"
+  integrity sha512-2iQTSgkkc1Zyk0MeVrt/3BvuOXYPl/R8Z0U2xxo9rjwNciaHDG3R+Lm6dh4EeUci49DanvBnuqI6jshoQQRGEg==
+
 thunky@^1.0.2:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d"
   integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==

 tiny-invariant@^1.0.2:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875"
-  integrity sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw==
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.3.1.tgz#8560808c916ef02ecfd55e66090df23a4b7aa642"
+  integrity sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==

-tiny-warning@^1.0.0, tiny-warning@^1.0.3:
+tiny-warning@^1.0.0:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754"
   integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==

@@ -9346,10 +9366,10 @@ toidentifier@1.0.1:
   resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
   integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==

-totalist@^1.0.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/totalist/-/totalist-1.1.0.tgz#a4d65a3e546517701e3e5c37a47a70ac97fe56df"
-  integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==
+totalist@^3.0.0:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8"
+  integrity sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==

 tr46@~0.0.3:
   version "0.0.3"
@@ -9381,27 +9401,29 @@ trough@^2.0.0:
   resolved "https://registry.yarnpkg.com/trough/-/trough-2.1.0.tgz#0f7b511a4fde65a46f18477ab38849b22c554876"
   integrity sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==

-ts-node@^9.1.1:
-  version "9.1.1"
-  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-9.1.1.tgz#51a9a450a3e959401bda5f004a72d54b936d376d"
-  integrity sha512-hPlt7ZACERQGf03M253ytLY3dHbGNGrAq9qIHWUY9XHYl1z7wYngSr3OQ5xmui8o2AaxsONxIzjafLUiWBo1Fg==
-  dependencies:
+ts-node@^10.9.1:
+  version "10.9.1"
+  resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b"
+  integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==
+  dependencies:
+    "@cspotcode/source-map-support" "^0.8.0"
+    "@tsconfig/node10" "^1.0.7"
+    "@tsconfig/node12" "^1.0.7"
+    "@tsconfig/node14" "^1.0.0"
+    "@tsconfig/node16" "^1.0.2"
+    acorn "^8.4.1"
+    acorn-walk "^8.1.1"
     arg "^4.1.0"
     create-require "^1.1.0"
     diff "^4.0.1"
     make-error "^1.1.1"
-    source-map-support "^0.5.17"
+    v8-compile-cache-lib "^3.0.1"
     yn "3.1.1"

-tslib@^2.0.0:
-  version "2.5.1"
-  resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.1.tgz#f2ad78c367857d54e49a0ef9def68737e1a67b21"
-  integrity sha512-KaI6gPil5m9vF7DKaoXxx1ia9fxS4qG5YveErRRVknPDXXriu5M8h48YRjB6h5ZUOKuAKlSJYb0GaDe8I39fRw==
-
-tslib@^2.0.3, tslib@^2.1.0, tslib@^2.4.0:
-  version "2.4.0"
-  resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
-  integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==
+tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.4.0:
+  version "2.6.2"
+  resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
+  integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==

 tslib@~2.0.1:
   version "2.0.3"
@@ -9456,14 +9478,14 @@ typedarray-to-buffer@^3.1.5:
     is-typedarray "^1.0.0"

 typescript@^4.1.5:
-  version "4.4.4"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c"
-  integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==
+  version "4.9.5"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a"
+  integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==

-ua-parser-js@^0.7.30:
-  version "0.7.33"
-  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532"
-  integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw==
+ua-parser-js@^1.0.35:
+  version "1.0.35"
+  resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-1.0.35.tgz#c4ef44343bc3db0a3cbefdf21822f1b1fc1ab011"
+  integrity sha512-fKnGuqmTBnIE+/KXSzCn4db8RTigUzw1AN0DmdU6hJovUTbYJKyqj+8Mt1c4VfRDnOVJnENmfYkIPZ946UrSAA==

 unherit@^1.0.4:
   version "1.1.3"
@@ -9663,10 +9685,10 @@ unpipe@1.0.0, unpipe@~1.0.0:
   resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
   integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==

-update-browserslist-db@^1.0.9:
-  version "1.0.10"
-  resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3"
-  integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==
+update-browserslist-db@^1.0.11:
+  version "1.0.11"
+  resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940"
+  integrity sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==
   dependencies:
     escalade "^3.1.1"
     picocolors "^1.0.0"
@@ -9751,6 +9773,11 @@ use-sidecar@^1.1.2:
     detect-node-es "^1.1.0"
     tslib "^2.0.0"

+use-sync-external-store@^1.2.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz#7dbefd6ef3fe4e767a0cf5d7287aacfb5846928a"
+  integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==
+
 util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
@@ -9786,6 +9813,11 @@ uvu@^0.5.0:
     kleur "^4.0.3"
     sade "^1.7.3"

+v8-compile-cache-lib@^3.0.1:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
+  integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==
+
 valid-url@1.0.9:
   version "1.0.9"
   resolved "https://registry.yarnpkg.com/valid-url/-/valid-url-1.0.9.tgz#1c14479b40f1397a75782f115e4086447433a200"
@@ -9889,18 +9921,26 @@ webidl-conversions@^3.0.0:
   integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==

 webpack-bundle-analyzer@^4.5.0:
-  version "4.6.1"
-  resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.6.1.tgz#bee2ee05f4ba4ed430e4831a319126bb4ed9f5a6"
-  integrity sha512-oKz9Oz9j3rUciLNfpGFjOb49/jEpXNmWdVH8Ls//zNcnLlQdTGXQQMsBbb/gR7Zl8WNLxVCq+0Hqbx3zv6twBw==
+  version "4.9.1"
+  resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.9.1.tgz#d00bbf3f17500c10985084f22f1a2bf45cb2f09d"
+  integrity sha512-jnd6EoYrf9yMxCyYDPj8eutJvtjQNp8PHmni/e/ulydHBWhT5J3menXt3HEkScsu9YqMAcG4CfFjs3rj5pVU1w==
   dependencies:
+    "@discoveryjs/json-ext" "0.5.7"
     acorn "^8.0.4"
     acorn-walk "^8.0.0"
-    chalk "^4.1.0"
     commander "^7.2.0"
+    escape-string-regexp "^4.0.0"
     gzip-size "^6.0.0"
-    lodash "^4.17.20"
+    is-plain-object "^5.0.0"
+    lodash.debounce "^4.0.8"
+    lodash.escape "^4.0.1"
+    lodash.flatten "^4.4.0"
+    lodash.invokemap "^4.6.0"
+    lodash.pullall "^4.2.0"
+    lodash.uniqby "^4.7.0"
     opener "^1.5.2"
-    sirv "^1.0.7"
+    picocolors "^1.0.0"
+    sirv "^2.0.3"
     ws "^7.3.1"

 webpack-dev-middleware@^5.3.1:
@@ -9915,9 +9955,9 @@ webpack-dev-middleware@^5.3.1:
     schema-utils "^4.0.0"

 webpack-dev-server@^4.9.3:
-  version "4.10.1"
-  resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.10.1.tgz#124ac9ac261e75303d74d95ab6712b4aec3e12ed"
-  integrity sha512-FIzMq3jbBarz3ld9l7rbM7m6Rj1lOsgq/DyLGMX/fPEB1UBUPtf5iL/4eNfhx8YYJTRlzfv107UfWSWcBK5Odw==
+  version "4.15.1"
+  resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz#8944b29c12760b3a45bdaa70799b17cb91b03df7"
+  integrity sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA==
   dependencies:
     "@types/bonjour" "^3.5.9"
     "@types/connect-history-api-fallback" "^1.3.5"
@@ -9925,7 +9965,7 @@ webpack-dev-server@^4.9.3:
     "@types/serve-index" "^1.9.1"
     "@types/serve-static" "^1.13.10"
     "@types/sockjs" "^0.3.33"
-    "@types/ws" "^8.5.1"
+    "@types/ws" "^8.5.5"
     ansi-html-community "^0.0.8"
     bonjour-service "^1.0.11"
     chokidar "^3.5.3"
@@ -9938,21 +9978,22 @@ webpack-dev-server@^4.9.3:
     html-entities "^2.3.2"
     http-proxy-middleware "^2.0.3"
     ipaddr.js "^2.0.1"
+    launch-editor "^2.6.0"
     open "^8.0.9"
     p-retry "^4.5.0"
     rimraf "^3.0.2"
     schema-utils "^4.0.0"
-    selfsigned "^2.0.1"
+    selfsigned "^2.1.1"
     serve-index "^1.9.1"
     sockjs "^0.3.24"
     spdy "^4.0.2"
     webpack-dev-middleware "^5.3.1"
-    ws "^8.4.2"
+    ws "^8.13.0"

 webpack-merge@^5.8.0:
-  version "5.8.0"
-  resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61"
-  integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==
+  version "5.9.0"
+  resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.9.0.tgz#dc160a1c4cf512ceca515cc231669e9ddb133826"
+  integrity sha512-6NbRQw4+Sy50vYNTw7EyOn41OZItPiXB8GNv3INSoe3PSFaHJEz3SHTrYVaRm2LilNGnFUzh0FAwqPEmU/CwDg==
   dependencies:
     clone-deep "^4.0.1"
     wildcard "^2.0.0"
@@ -9963,21 +10004,21 @@ webpack-sources@^3.2.2, webpack-sources@^3.2.3:
   integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==

 webpack@^5.73.0:
-  version "5.74.0"
-  resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.74.0.tgz#02a5dac19a17e0bb47093f2be67c695102a55980"
-  integrity sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA==
+  version "5.88.2"
+  resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.88.2.tgz#f62b4b842f1c6ff580f3fcb2ed4f0b579f4c210e"
+  integrity sha512-JmcgNZ1iKj+aiR0OvTYtWQqJwq37Pf683dY9bVORwVbUrDhLhdn/PlO2sHsFHPkj7sHNQF3JwaAkp49V+Sq1tQ==
   dependencies:
     "@types/eslint-scope" "^3.7.3"
-    "@types/estree" "^0.0.51"
-    "@webassemblyjs/ast" "1.11.1"
-    "@webassemblyjs/wasm-edit" "1.11.1"
-    "@webassemblyjs/wasm-parser" "1.11.1"
+    "@types/estree" "^1.0.0"
+    "@webassemblyjs/ast" "^1.11.5"
+    "@webassemblyjs/wasm-edit" "^1.11.5"
+    "@webassemblyjs/wasm-parser" "^1.11.5"
     acorn "^8.7.1"
-    acorn-import-assertions "^1.7.6"
+    acorn-import-assertions "^1.9.0"
     browserslist "^4.14.5"
     chrome-trace-event "^1.0.2"
-    enhanced-resolve "^5.10.0"
-    es-module-lexer "^0.9.0"
+    enhanced-resolve "^5.15.0"
+    es-module-lexer "^1.2.1"
     eslint-scope "5.1.1"
     events "^3.2.0"
     glob-to-regexp "^0.4.1"
@@ -9986,9 +10027,9 @@ webpack@^5.73.0:
     loader-runner "^4.2.0"
     mime-types "^2.1.27"
     neo-async "^2.6.2"
-    schema-utils "^3.1.0"
+    schema-utils "^3.2.0"
     tapable "^2.1.1"
-    terser-webpack-plugin "^5.1.3"
+    terser-webpack-plugin "^5.3.7"
     watchpack "^2.4.0"
     webpack-sources "^3.2.3"

@@ -10053,9 +10094,9 @@ widest-line@^4.0.1:
     string-width "^5.0.1"

 wildcard@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec"
-  integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67"
+  integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==

 wrap-ansi@^7.0.0:
   version "7.0.0"
@@ -10067,9 +10108,9 @@ wrap-ansi@^7.0.0:
     strip-ansi "^6.0.0"

 wrap-ansi@^8.0.1:
-  version "8.0.1"
-  resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.0.1.tgz#2101e861777fec527d0ea90c57c6b03aac56a5b3"
-  integrity sha512-QFF+ufAqhoYHvoHdajT/Po7KoXVBPXS2bgjIam5isfWJPfIOnQZ50JtUiVvCv/sjgacf3yRrt2ZKUZ/V4itN4g==
+  version "8.1.0"
+  resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"
+  integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==
   dependencies:
     ansi-styles "^6.1.0"
     string-width "^5.0.1"
@@ -10096,14 +10137,14 @@ ws@7.4.5:
   integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g==

 "ws@^5.2.0 || ^6.0.0 || ^7.0.0", ws@^7.3.1:
-  version "7.5.5"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.5.tgz#8b4bc4af518cfabd0473ae4f99144287b33eb881"
-  integrity sha512-BAkMFcAzl8as1G/hArkxOxq3G7pjUqQ3gzYbLL0/5zNkph70e+lCoxBGnm6AW1+/aiNeV4fnKqZ8m4GZewmH2w==
-
-ws@^8.4.2:
-  version "8.8.1"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-8.8.1.tgz#5dbad0feb7ade8ecc99b830c1d77c913d4955ff0"
-  integrity sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA==
+  version "7.5.9"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
+  integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
+
+ws@^8.13.0:
+  version "8.13.0"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
+  integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==

 xdg-basedir@^4.0.0:
   version "4.0.0"

From 065a290bd5b33885ac7664233434ccae570d6941 Mon Sep 17 00:00:00 2001
From: Hyejin Yoon <0327jane@gmail.com>
Date: Mon, 4 Sep 2023 15:49:00 +0900
Subject: [PATCH 40/41] fix:change global graph url to static-assets (#8742)

---
 metadata-ingestion/scripts/modeldocgen.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/metadata-ingestion/scripts/modeldocgen.py b/metadata-ingestion/scripts/modeldocgen.py
index 415dd57136f68..ffa80515dbafd 100644
--- a/metadata-ingestion/scripts/modeldocgen.py
+++ b/metadata-ingestion/scripts/modeldocgen.py
@@ -305,7 +305,7 @@ def make_entity_docs(entity_display_name: str, graph: RelationshipGraph) -> str:
     )

     # create global metadata graph
-    global_graph_url = "https://github.com/datahub-project/datahub/raw/master/docs/imgs/datahub-metadata-model.png"
+    global_graph_url = "https://github.com/datahub-project/static-assets/raw/main/imgs/datahub-metadata-model.png"
     global_graph_section = (
         f"\n## [Global Metadata Model]({global_graph_url})"
         + f"\n![Global Graph]({global_graph_url})"

From 9f223c0069eb4986e69fb5dc0c849f08843c37b5 Mon Sep 17 00:00:00 2001
From: Aseem Bansal
Date: Tue, 5 Sep 2023 14:15:34 +0530
Subject: [PATCH 41/41] doc(tests): fix endpoint param to push results (#8783)

---
 docs/api/restli/evaluate-tests.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/api/restli/evaluate-tests.md b/docs/api/restli/evaluate-tests.md
index 3e4b6d73928a4..5b466a5f429d4 100644
--- a/docs/api/restli/evaluate-tests.md
+++ b/docs/api/restli/evaluate-tests.md
@@ -14,5 +14,5 @@ curl --location --request POST 'https://DOMAIN.acryl.io/gms/test?action=evaluate w
 The supported parameters are

 - `urn` - Required URN string
-- `shouldPush` - Optional Boolean - whether or not to push the results to persist them
+- `push` - Optional Boolean - whether or not to push the results to persist them. Default `false`.
 - `testUrns` - Optional List of string - If you wish to get specific test URNs evaluated