chore(ingest): bump black formatter #8898

Closed
1 change: 0 additions & 1 deletion metadata-ingestion/examples/cli_usage/gen_schemas.py
@@ -28,7 +28,6 @@ class CorpGroupFile(BaseModel):


with open("user/user.dhub.yaml_schema.json", "w") as fp:
-
fp.write(json.dumps(CorpUserFile.schema(), indent=4))

with open("group/group.dhub.yaml_schema.json", "w") as fp:
2 changes: 1 addition & 1 deletion metadata-ingestion/setup.py
@@ -446,7 +446,7 @@ def get_long_description():
*s3_base,
# This is pinned only to avoid spurious errors in CI.
# We should make an effort to keep it up to date.
"black==22.12.0",
"black==23.9.1",
"coverage>=5.1",
"faker>=18.4.0",
"flake8>=3.8.3", # DEPRECATION: Once we drop Python 3.7, we can pin to 6.x.
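Almost every other hunk in this diff is the same mechanical edit: the 2023 stable style that ships with black 23.x strips a blank line sitting immediately after a function signature, class header, or other block opener, which black 22.12.0 still allowed. A minimal before/after sketch of that rule (`load_schema` is an illustrative name, not code from this repo):

```python
# Accepted by black 22.12.0: a blank line directly after the block opener.
def load_schema(path: str) -> str:

    with open(path) as fp:
        return fp.read()


# black 23.x removes the leading blank line inside the body:
def load_schema(path: str) -> str:
    with open(path) as fp:
        return fp.read()
```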
@@ -225,7 +225,6 @@ def _generate_properties_mcp(
def generate_mcp(
self, upsert: bool
) -> Iterable[Union[MetadataChangeProposalWrapper, MetadataChangeProposalClass]]:
-
if self._resolved_domain_urn is None:
raise Exception(
f"Unable to generate MCP-s because we were unable to resolve the domain {self.domain} to an urn."
@@ -440,7 +439,6 @@ def patch_yaml(
original_dataproduct: DataProduct,
output_file: Path,
) -> bool:
-
update_needed = False
if not original_dataproduct._original_yaml_dict:
raise Exception("Original Data Product was not loaded from yaml")
@@ -523,7 +521,6 @@ def to_yaml(
self,
file: Path,
) -> None:
-
with open(file, "w") as fp:
yaml = YAML(typ="rt") # default, if not specfied, is 'rt' (round-trip)
yaml.indent(mapping=2, sequence=4, offset=2)
@@ -56,7 +56,6 @@ def _abort_if_non_existent_urn(graph: DataHubGraph, urn: str, operation: str) ->


def _print_diff(orig_file, new_file):
-
with open(orig_file) as fp:
orig_lines = fp.readlines()
with open(new_file) as fp:
@@ -7,7 +7,6 @@


class PathSpecsConfigMixin(ConfigModel):
-
path_specs: List[PathSpec] = Field(
description="List of PathSpec. See [below](#path-spec) the details about PathSpec"
)
@@ -213,7 +213,6 @@ def external_url_defaults_to_api_config_base_url(
def stateful_ingestion_should_be_enabled(
cls, v: Optional[bool], *, values: Dict[str, Any], **kwargs: Dict[str, Any]
) -> Optional[bool]:
-
stateful_ingestion: StatefulStaleMetadataRemovalConfig = cast(
StatefulStaleMetadataRemovalConfig, values.get("stateful_ingestion")
)
@@ -1128,7 +1128,6 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
def emit_independent_looks_mcp(
self, dashboard_element: LookerDashboardElement
) -> Iterable[MetadataWorkUnit]:
-
yield from auto_workunit(
stream=self._make_chart_metadata_events(
dashboard_element=dashboard_element,
@@ -1070,7 +1070,6 @@ def _get_fields(
def determine_view_file_path(
cls, base_folder_path: str, absolute_file_path: str
) -> str:
-
splits: List[str] = absolute_file_path.split(base_folder_path, 1)
if len(splits) != 2:
logger.debug(
@@ -1104,7 +1103,6 @@ def from_looker_dict(
populate_sql_logic_in_descriptions: bool = False,
process_isolation_for_sql_parsing: bool = False,
) -> Optional["LookerView"]:
-
view_name = looker_view["name"]
logger.debug(f"Handling view {view_name} in model {model_name}")
# The sql_table_name might be defined in another view and this view is extending that view,
@@ -2087,7 +2085,6 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901
)

if looker_viewfile is not None:
-
for raw_view in looker_viewfile.views:
raw_view_name = raw_view["name"]
if LookerRefinementResolver.is_refinement(raw_view_name):
@@ -60,7 +60,6 @@ def parse_custom_sql(
env: str,
platform_instance: Optional[str],
) -> Optional["SqlParsingResult"]:
-
logger.debug("Using sqlglot_lineage to parse custom sql")

sql_query = remove_special_characters(query)
@@ -52,7 +52,6 @@ def urn_creator(
server: str,
qualified_table_name: str,
) -> str:
-
platform_detail: PlatformDetail = platform_instance_resolver.get_platform_instance(
PowerBIPlatformDetail(
data_platform_pair=data_platform_pair,
@@ -148,7 +147,6 @@ def get_db_detail_from_argument(
def parse_custom_sql(
self, query: str, server: str, database: Optional[str], schema: Optional[str]
) -> List[DataPlatformTable]:
-
dataplatform_tables: List[DataPlatformTable] = []

platform_detail: PlatformDetail = (
@@ -126,7 +126,6 @@ def log_http_error(e: BaseException, message: str) -> Any:


def get_response_dict(response: requests.Response, error_message: str) -> dict:
-
result_dict: dict = {}
try:
response.raise_for_status()
@@ -135,7 +135,11 @@ def get_percent_entities_changed(
def compute_percent_entities_changed(
new_entities: List[str], old_entities: List[str]
) -> float:
-(overlap_count, old_count, _,) = _get_entity_overlap_and_cardinalities(
+(
+    overlap_count,
+    old_count,
+    _,
+) = _get_entity_overlap_and_cardinalities(
new_entities=new_entities, old_entities=old_entities
)

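This is the one hunk where the bump reshapes code instead of just deleting a blank line: the parenthesized assignment target ends in a trailing comma, and black 23.x appears to honor its magic trailing comma rule for assignment targets too, exploding the tuple one name per line. A sketch under that assumption (the names mirror the hunk; the right-hand values are dummies for illustration):

```python
# The trailing comma after "_" is "magic": black keeps the target
# exploded, one element per line.
(
    overlap_count,
    old_count,
    _,
) = (3, 10, None)

# Without the trailing comma, the same target fits on one line:
overlap_count, old_count, _ = (3, 10, None)
```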
@@ -588,7 +588,6 @@ def get_overridden_info(
platform_instance_map: Optional[Dict[str, str]],
lineage_overrides: Optional[TableauLineageOverrides] = None,
) -> Tuple[Optional[str], Optional[str], str, str]:
-
original_platform = platform = get_platform(connection_type)
if (
lineage_overrides is not None
@@ -622,7 +621,6 @@ def make_table_urn(
platform_instance_map: Optional[Dict[str, str]],
lineage_overrides: Optional[TableauLineageOverrides] = None,
) -> str:
-
upstream_db, platform_instance, platform, original_platform = get_overridden_info(
connection_type=connection_type,
upstream_db=upstream_db,
@@ -38,7 +38,8 @@ def transform_generic_aspect(
self, entity_urn: str, aspect_name: str, aspect: Optional[GenericAspectClass]
) -> Optional[GenericAspectClass]:
"""Implement this method to transform the single custom aspect for an entity.
-The purpose of this abstract method is to reinforce the use of GenericAspectClass."""
+The purpose of this abstract method is to reinforce the use of GenericAspectClass.
+"""
pass

def _transform_or_record_mcpc(
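The docstring edit looks like another 2023-style rule: when the closing quotes of a multiline docstring share a line with text that would overrun the line length, black 23.x moves the quotes onto their own line. A minimal sketch assuming that rule (`transform` is illustrative, not the repo's method):

```python
def transform(aspect: dict) -> dict:
    """Transform a single custom aspect for an entity.

    Under black 23.x the closing quotes of this multiline docstring
    sit on their own line, as shown here.
    """
    return aspect
```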
@@ -68,7 +68,6 @@ def run_ingest(
"datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider.DataHubGraph",
mock_datahub_graph,
) as mock_checkpoint:
-
mock_checkpoint.return_value = mock_datahub_graph

mocked_functions_reference(
1 change: 0 additions & 1 deletion metadata-ingestion/tests/integration/looker/test_looker.py
@@ -969,7 +969,6 @@ def test_independent_soft_deleted_looks(
mocked_client = mock.MagicMock()

with mock.patch("looker_sdk.init40") as mock_sdk:
-
mock_sdk.return_value = mocked_client
setup_mock_look(mocked_client)
setup_mock_soft_deleted_look(mocked_client)
3 changes: 0 additions & 3 deletions metadata-ingestion/tests/integration/okta/test_okta.py
@@ -58,14 +58,12 @@ def run_ingest(
mocked_functions_reference,
recipe,
):
-
with patch(
"datahub.ingestion.source.identity.okta.OktaClient"
) as MockClient, patch(
"datahub.ingestion.source.state_provider.datahub_ingestion_checkpointing_provider.DataHubGraph",
mock_datahub_graph,
) as mock_checkpoint:
-
mock_checkpoint.return_value = mock_datahub_graph

mocked_functions_reference(MockClient=MockClient)
@@ -277,7 +275,6 @@ def overwrite_group_in_mocked_data(test_resources_dir, MockClient):
def _init_mock_okta_client(
test_resources_dir, MockClient, mock_users_json=None, mock_groups_json=None
):
-
okta_users_json_file = (
test_resources_dir / "okta_users.json"
if mock_users_json is None
@@ -1420,7 +1420,6 @@ def test_independent_datasets_extraction(
def test_independent_datasets_extraction(
mock_msal, pytestconfig, tmp_path, mock_time, requests_mock
):
-
test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi"

register_mock_api(
@@ -151,7 +151,6 @@ def register_mock_api(request_mock: Any, override_data: dict = {}) -> None:
@freeze_time(FROZEN_TIME)
@pytest.mark.integration
def test_superset_ingest(pytestconfig, tmp_path, mock_time, requests_mock):
-
test_resources_dir = pytestconfig.rootpath / "tests/integration/superset"

register_mock_api(request_mock=requests_mock)
@@ -193,7 +192,6 @@ def test_superset_ingest(pytestconfig, tmp_path, mock_time, requests_mock):
def test_superset_stateful_ingest(
pytestconfig, tmp_path, mock_time, requests_mock, mock_datahub_graph
):
-
test_resources_dir = pytestconfig.rootpath / "tests/integration/superset"

register_mock_api(request_mock=requests_mock)
@@ -792,7 +792,6 @@ def test_tableau_unsupported_csql(mock_datahub_graph):
)

with mock.patch("datahub.ingestion.source.tableau.sqlglot_l") as sqlglot_lineage:
-
sqlglot_lineage.create_lineage_sql_parsed_result.return_value = SqlParsingResult( # type:ignore
in_tables=[
"urn:li:dataset:(urn:li:dataPlatform:bigquery,my_bigquery_project.invent_dw.userdetail,PROD)"
1 change: 0 additions & 1 deletion metadata-ingestion/tests/unit/test_unity_catalog_config.py
@@ -63,7 +63,6 @@ def test_profiling_requires_warehouses_id():

@freeze_time(FROZEN_TIME)
def test_workspace_url_should_start_with_https():
-
with pytest.raises(ValueError, match="Workspace URL must start with http scheme"):
UnityCatalogSourceConfig.parse_obj(
{