Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(sql-parser): prevent bad urns from alter table lineage #11092

Merged
merged 7 commits into from
Aug 8, 2024
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 38 additions & 24 deletions metadata-ingestion/src/datahub/sql_parsing/sqlglot_lineage.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,35 +189,49 @@ def _table_level_lineage(
statement: sqlglot.Expression, dialect: sqlglot.Dialect
) -> Tuple[Set[_TableName], Set[_TableName]]:
# Generate table-level lineage.
modified = {
_TableName.from_sqlglot_table(expr.this)
for expr in statement.find_all(
sqlglot.exp.Create,
sqlglot.exp.Insert,
sqlglot.exp.Update,
sqlglot.exp.Delete,
sqlglot.exp.Merge,
)
# In some cases like "MERGE ... then INSERT (col1, col2) VALUES (col1, col2)",
# the `this` on the INSERT part isn't a table.
if isinstance(expr.this, sqlglot.exp.Table)
} | {
# For statements that include a column list, like
# CREATE DDL statements and `INSERT INTO table (col1, col2) SELECT ...`
# the table name is nested inside a Schema object.
_TableName.from_sqlglot_table(expr.this.this)
for expr in statement.find_all(
sqlglot.exp.Create,
sqlglot.exp.Insert,
)
if isinstance(expr.this, sqlglot.exp.Schema)
and isinstance(expr.this.this, sqlglot.exp.Table)
}
modified = (
{
_TableName.from_sqlglot_table(expr.this)
for expr in statement.find_all(
sqlglot.exp.Create,
sqlglot.exp.Insert,
sqlglot.exp.Update,
sqlglot.exp.Delete,
sqlglot.exp.Merge,
sqlglot.exp.AlterTable,
)
# In some cases like "MERGE ... then INSERT (col1, col2) VALUES (col1, col2)",
# the `this` on the INSERT part isn't a table.
if isinstance(expr.this, sqlglot.exp.Table)
}
| {
# For statements that include a column list, like
# CREATE DDL statements and `INSERT INTO table (col1, col2) SELECT ...`
# the table name is nested inside a Schema object.
_TableName.from_sqlglot_table(expr.this.this)
for expr in statement.find_all(
sqlglot.exp.Create,
sqlglot.exp.Insert,
)
if isinstance(expr.this, sqlglot.exp.Schema)
and isinstance(expr.this.this, sqlglot.exp.Table)
}
| {
# For drop statements, we only want it if a table/view is being dropped.
# Other "kinds" will not have table.name populated.
_TableName.from_sqlglot_table(expr.this)
for expr in statement.find_all(sqlglot.exp.Drop)
if isinstance(expr.this, sqlglot.exp.Table)
and expr.this.this
and expr.this.name
}
)

tables = (
{
_TableName.from_sqlglot_table(table)
for table in statement.find_all(sqlglot.exp.Table)
if not isinstance(table.parent, sqlglot.exp.Drop)
}
# ignore references created in this query
- modified
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@

logger = logging.getLogger(__name__)

# TODO: Hook this into the standard --update-golden-files mechanism.
UPDATE_FILES = os.environ.get("UPDATE_SQLPARSER_FILES", "false").lower() == "true"


Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
{
"query_type": "UNKNOWN",
"query_type_props": {},
"query_fingerprint": "7d04253c3add0194c557942ef9b7485f38e68762d300dad364b9cec8656035b3",
"in_tables": [],
"out_tables": [
"urn:li:dataset:(urn:li:dataPlatform:bigquery,my-bq-project.covid_data.covid_deaths,PROD)",
"urn:li:dataset:(urn:li:dataPlatform:bigquery,patient_name,PROD)"
],
"column_lineage": null,
"debug_info": {
"confidence": 0.2,
"generalized_statement": "ALTER TABLE `my-bq-project.covid_data.covid_deaths` DROP COLUMN patient_name"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"query_type": "UNKNOWN",
"query_type_props": {},
"query_fingerprint": "4eefab57619a812a94030acce0071857561265945e79d798563adb53bd0b9646",
"in_tables": [],
"out_tables": [],
"column_lineage": null,
"debug_info": {
"confidence": 0.9,
"generalized_statement": "DROP SCHEMA my_schema"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"query_type": "UNKNOWN",
"query_type_props": {},
"query_fingerprint": "d1c29ad73325b08bb66e62ec00ba1d5be4412ec72b4bbc9c094f1272b9da4f86",
"in_tables": [],
"out_tables": [
"urn:li:dataset:(urn:li:dataPlatform:sqlite,my_schema.my_table,PROD)"
],
"column_lineage": null,
"debug_info": {
"confidence": 0.2,
"generalized_statement": "DROP TABLE my_schema.my_table"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"query_type": "UNKNOWN",
"query_type_props": {},
"query_fingerprint": "35a3c60e7ed98884dde3f1f5fe9079f844832430589a3326b97d617b8303f191",
"in_tables": [],
"out_tables": [
"urn:li:dataset:(urn:li:dataPlatform:sqlite,my_schema.my_view,PROD)"
],
"column_lineage": null,
"debug_info": {
"confidence": 0.2,
"generalized_statement": "DROP VIEW my_schema.my_view"
}
}
51 changes: 51 additions & 0 deletions metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,22 @@

import pytest

import datahub.testing.check_sql_parser_result as checker
from datahub.testing.check_sql_parser_result import assert_sql_result

RESOURCE_DIR = pathlib.Path(__file__).parent / "goldens"


@pytest.fixture(autouse=True)
def set_update_sql_parser(
    pytestconfig: pytest.Config, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Propagate pytest's ``--update-golden-files`` flag to the checker module.

    When the flag is set, flips ``checker.UPDATE_FILES`` for the duration of
    the test (monkeypatch restores it afterwards) so golden files get rewritten.
    """
    if pytestconfig.getoption("--update-golden-files"):
        monkeypatch.setattr(checker, "UPDATE_FILES", True)


def test_invalid_sql():
assert_sql_result(
"""
Expand Down Expand Up @@ -1202,3 +1213,43 @@ def test_bigquery_information_schema_query() -> None:
dialect="bigquery",
expected_file=RESOURCE_DIR / "test_bigquery_information_schema_query.json",
)


def test_bigquery_alter_table_column() -> None:
    """ALTER TABLE ... DROP COLUMN should be parsed against the golden file."""
    query = """\
ALTER TABLE `my-bq-project.covid_data.covid_deaths` drop COLUMN patient_name
"""
    golden = RESOURCE_DIR / "test_bigquery_alter_table_column.json"
    assert_sql_result(query, dialect="bigquery", expected_file=golden)


def test_sqlite_drop_table() -> None:
    """DROP TABLE lineage should match the sqlite golden file."""
    query = """\
DROP TABLE my_schema.my_table
"""
    golden = RESOURCE_DIR / "test_sqlite_drop_table.json"
    assert_sql_result(query, dialect="sqlite", expected_file=golden)


def test_sqlite_drop_view() -> None:
    """DROP VIEW lineage should match the sqlite golden file."""
    query = """\
DROP VIEW my_schema.my_view
"""
    golden = RESOURCE_DIR / "test_sqlite_drop_view.json"
    assert_sql_result(query, dialect="sqlite", expected_file=golden)


def test_snowflake_drop_schema() -> None:
    """DROP SCHEMA (not a table/view) should match the snowflake golden file."""
    query = """\
DROP SCHEMA my_schema
"""
    golden = RESOURCE_DIR / "test_snowflake_drop_schema.json"
    assert_sql_result(query, dialect="snowflake", expected_file=golden)
Loading