SNOW-640136: Revert breaking changes in snowflake-sqlalchemy v1.4.0 (#…
sfc-gh-aling authored Aug 8, 2022
1 parent e0ade9c commit eea93a9
Showing 7 changed files with 34 additions and 68 deletions.
4 changes: 3 additions & 1 deletion DESCRIPTION.md
@@ -13,7 +13,9 @@ Source code is also available at:

- snowflake-sqlalchemy is now SQLAlchemy 2.0 compatible.
- Fixed a bug that `DATE` should not be removed from `SnowflakeDialect.ischema_names`.
- Fixed a breaking change introduced in release 1.4.0 that changed the behavior of processing numeric values returned from service.
- Fixed breaking changes introduced in release 1.4.0 that:
  - changed the behavior of processing numeric, datetime and timestamp values returned from the service.
  - changed the ordering of primary/foreign key columns in the lists returned by `inspect.get_foreign_keys` and `inspect.get_pk_constraint`.

- v1.4.0(July 20, 2022)

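For context, a minimal sketch of the reflection call whose column ordering this revert affects. The connection URL, credentials, and table name below are illustrative placeholders, not part of the commit:

```python
from sqlalchemy import create_engine, inspect

# Placeholder URL; substitute real Snowflake account credentials.
engine = create_engine("snowflake://user:password@account/testdb/public")

inspector = inspect(engine)
pk = inspector.get_pk_constraint("mytable")
# With the revert, constrained_columns keeps the order in which Snowflake
# reports the primary key columns, rather than being re-sorted by
# key_sequence as in v1.4.0 (compare the test_core.py change below).
print(pk["constrained_columns"])
```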
44 changes: 0 additions & 44 deletions src/snowflake/sqlalchemy/custom_types.py
@@ -1,8 +1,6 @@
#
# Copyright (c) 2012-2022 Snowflake Computing Inc. All rights reserved.
#
import datetime
import re

import sqlalchemy.types as sqltypes
import sqlalchemy.util as util
@@ -71,20 +69,6 @@ def process(value):

return process

_reg = re.compile(r"(\d+)-(\d+)-(\d+)")

def result_processor(self, dialect, coltype):
def process(value):
if isinstance(value, str):
m = self._reg.match(value)
if not m:
raise ValueError(f"could not parse {value!r} as a date value")
return datetime.date(*[int(x or 0) for x in m.groups()])
else:
return value

return process


class _CUSTOM_DateTime(SnowflakeType, sqltypes.DateTime):
def literal_processor(self, dialect):
@@ -95,20 +79,6 @@ def process(value):

return process

_reg = re.compile(r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d{0,6}))?")

def result_processor(self, dialect, coltype):
def process(value):
if isinstance(value, str):
m = self._reg.match(value)
if not m:
raise ValueError(f"could not parse {value!r} as a datetime value")
return datetime.datetime(*[int(x or 0) for x in m.groups()])
else:
return value

return process


class _CUSTOM_Time(SnowflakeType, sqltypes.Time):
def literal_processor(self, dialect):
@@ -119,20 +89,6 @@ def process(value):

return process

_reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d{0,6}))?")

def result_processor(self, dialect, coltype):
def process(value):
if isinstance(value, str):
m = self._reg.match(value)
if not m:
raise ValueError(f"could not parse {value!r} as a time value")
return datetime.time(*[int(x or 0) for x in m.groups()])
else:
return value

return process


class _CUSTOM_Float(SnowflakeType, sqltypes.Float):
def bind_processor(self, dialect):
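For reference, a standalone sketch of the string parsing that the deleted `result_processor` performed for `DATETIME` values and that this revert removes. Non-string values, which the Snowflake connector already returns as `datetime` objects, pass through unchanged:

```python
import datetime
import re

# Same pattern as the removed _CUSTOM_DateTime._reg.
_reg = re.compile(r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d{0,6}))?")

def parse_datetime_string(value):
    # Mirrors the removed processor: strings are parsed into datetime.datetime,
    # anything else is returned as-is.
    if isinstance(value, str):
        m = _reg.match(value)
        if not m:
            raise ValueError(f"could not parse {value!r} as a datetime value")
        return datetime.datetime(*[int(x or 0) for x in m.groups()])
    return value

print(parse_datetime_string("2022-08-08 12:34:56.123456"))
print(parse_datetime_string(datetime.datetime(2022, 8, 8)))  # passed through unchanged
```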
14 changes: 14 additions & 0 deletions src/snowflake/sqlalchemy/requirements.py
@@ -281,3 +281,17 @@ def implicit_decimal_binds(self):
# parameters in string forms of decimal values.
# Check https://snowflakecomputing.atlassian.net/browse/SNOW-640134 for details on breaking changes discussion.
return exclusions.closed()

@property
def datetime_implicit_bound(self):
# Supporting this would require behavior breaking change to implicitly convert str to datetime when binding
# parameters in string forms of datetime values.
# Check https://snowflakecomputing.atlassian.net/browse/SNOW-640134 for details on breaking changes discussion.
return exclusions.closed()

@property
def timestamp_microseconds_implicit_bound(self):
# Supporting this would require behavior breaking change to implicitly convert str to timestamp when binding
# parameters in string forms of timestamp values.
# Check https://snowflakecomputing.atlassian.net/browse/SNOW-640134 for details on breaking changes discussion.
return exclusions.closed()
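These properties follow SQLAlchemy's testing `Requirements` mechanism: returning `exclusions.closed()` marks a capability as unsupported, so the upstream suite skips the corresponding tests instead of failing them. A minimal sketch of the pattern; the base class and surrounding module layout shown here are assumed, not part of the diff:

```python
from sqlalchemy.testing import exclusions
from sqlalchemy.testing.requirements import SuiteRequirements

class Requirements(SuiteRequirements):
    @property
    def datetime_implicit_bound(self):
        # closed() means "not supported": tests that bind datetime values as
        # strings are skipped rather than run against Snowflake.
        return exclusions.closed()
```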
19 changes: 1 addition & 18 deletions src/snowflake/sqlalchemy/snowdialect.py
@@ -61,7 +61,6 @@
_CUSTOM_Float,
_CUSTOM_Time,
)
from .util import _sort_columns_by_sequences

colspecs = {
Date: _CUSTOM_Date,
@@ -326,10 +325,8 @@ def _get_schema_primary_keys(self, connection, schema, **kw):
)
)
ans = {}
key_sequence_order_map = defaultdict(list)
for row in result:
table_name = self.normalize_name(row._mapping["table_name"])
key_sequence_order_map[table_name].append(row._mapping["key_sequence"])
if table_name not in ans:
ans[table_name] = {
"constrained_columns": [],
@@ -338,12 +335,6 @@ def _get_schema_primary_keys(self, connection, schema, **kw):
ans[table_name]["constrained_columns"].append(
self.normalize_name(row._mapping["column_name"])
)

for k, v in ans.items():
v["constrained_columns"] = _sort_columns_by_sequences(
key_sequence_order_map[k], v["constrained_columns"]
)

return ans

def get_pk_constraint(self, connection, table_name, schema=None, **kw):
@@ -406,10 +397,8 @@ def _get_schema_foreign_keys(self, connection, schema, **kw):
)
)
foreign_key_map = {}
key_sequence_order_map = defaultdict(list)
for row in result:
name = self.normalize_name(row._mapping["fk_name"])
key_sequence_order_map[name].append(row._mapping["key_sequence"])
if name not in foreign_key_map:
referred_schema = self.normalize_name(row._mapping["pk_schema_name"])
foreign_key_map[name] = {
@@ -453,13 +442,7 @@ def _get_schema_foreign_keys(self, connection, schema, **kw):

ans = {}

for k, v in foreign_key_map.items():
v["constrained_columns"] = _sort_columns_by_sequences(
key_sequence_order_map[k], v["constrained_columns"]
)
v["referred_columns"] = _sort_columns_by_sequences(
key_sequence_order_map[k], v["referred_columns"]
)
for _, v in foreign_key_map.items():
if v["table_name"] not in ans:
ans[v["table_name"]] = []
ans[v["table_name"]].append(
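The foreign key side of the revert is visible through the same inspector API; a hedged sketch (the engine and table name are assumed, as in the earlier example) of the call whose result ordering changes back:

```python
from sqlalchemy import inspect

# `engine` is assumed to be a Snowflake engine created as in the earlier sketch.
for fk in inspect(engine).get_foreign_keys("child_table"):
    # constrained_columns and referred_columns now keep the order Snowflake
    # reports them in, instead of being re-sorted by key_sequence.
    print(fk["name"], fk["constrained_columns"], fk["referred_columns"])
```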
4 changes: 0 additions & 4 deletions src/snowflake/sqlalchemy/util.py
@@ -63,7 +63,3 @@ def sep(is_first_parameter):
ret += sep(is_first_parameter) + p + "=" + encoded_value
is_first_parameter = False
return ret


def _sort_columns_by_sequences(columns_sequences, columns):
return [col for _, col in sorted(zip(columns_sequences, columns))]
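For reference, the removed helper simply reordered column names by their reported key sequence numbers; a quick sketch of its effect, using the composite key from test_core.py below (the sequence numbers here are illustrative):

```python
def _sort_columns_by_sequences(columns_sequences, columns):
    # Pair each column with its key_sequence value and sort by that value.
    return [col for _, col in sorted(zip(columns_sequences, columns))]

# With key sequences (2, 1), the v1.4.0 behavior reordered the reflected
# columns; after the revert the raw order ["gid", "id"] is returned instead.
print(_sort_columns_by_sequences([2, 1], ["gid", "id"]))  # ['id', 'gid']
```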
15 changes: 15 additions & 0 deletions tests/sqlalchemy_test_suite/test_suite.py
@@ -6,6 +6,9 @@
from sqlalchemy.schema import Column, Sequence, Table
from sqlalchemy.testing import config
from sqlalchemy.testing.assertions import eq_
from sqlalchemy.testing.suite import (
CompositeKeyReflectionTest as _CompositeKeyReflectionTest,
)
from sqlalchemy.testing.suite import FetchLimitOffsetTest as _FetchLimitOffsetTest
from sqlalchemy.testing.suite import HasSequenceTest as _HasSequenceTest
from sqlalchemy.testing.suite import InsertBehaviorTest as _InsertBehaviorTest
@@ -134,3 +137,15 @@ def test_delete(self, connection):
connection.execute(t.select().order_by(t.c.id)).fetchall(),
[(1, "d1"), (3, "d3")],
)


class CompositeKeyReflectionTest(_CompositeKeyReflectionTest):
@pytest.mark.xfail(reason="Fixing this would require behavior breaking change.")
def test_fk_column_order(self):
# Check https://snowflakecomputing.atlassian.net/browse/SNOW-640134 for details on breaking changes discussion.
super().test_fk_column_order()

@pytest.mark.xfail(reason="Fixing this would require behavior breaking change.")
def test_pk_column_order(self):
# Check https://snowflakecomputing.atlassian.net/browse/SNOW-640134 for details on breaking changes discussion.
super().test_pk_column_order()
2 changes: 1 addition & 1 deletion tests/test_core.py
@@ -617,7 +617,7 @@ def test_get_multile_column_primary_key(engine_testaccount):
assert columns_in_mytable[1]["primary_key"], "primary key"

primary_keys = inspector.get_pk_constraint("mytable")
assert primary_keys["constrained_columns"] == ["id", "gid"]
assert primary_keys["constrained_columns"] == ["gid", "id"]

finally:
mytable.drop(engine_testaccount)
