SNOW-638841: ints returned as Decimals when upgrading from version 1.3.4 to 1.4.0 (#335)
sfc-gh-aling authored Aug 8, 2022
1 parent 50a8b4e commit e0ade9c
Showing 5 changed files with 149 additions and 26 deletions.
2 changes: 2 additions & 0 deletions DESCRIPTION.md
@@ -10,8 +10,10 @@ Source code is also available at:
# Release Notes

- v1.4.1(Unreleased)

- snowflake-sqlalchemy is now SQLAlchemy 2.0 compatible.
- Fixed a bug where `DATE` was unintentionally removed from `SnowflakeDialect.ischema_names`.
- Fixed a breaking change introduced in release 1.4.0 that altered how numeric values returned from the service are processed.

- v1.4.0(July 20, 2022)

16 changes: 0 additions & 16 deletions src/snowflake/sqlalchemy/custom_types.py
@@ -2,7 +2,6 @@
# Copyright (c) 2012-2022 Snowflake Computing Inc. All rights reserved.
#
import datetime
import decimal
import re

import sqlalchemy.types as sqltypes
@@ -144,18 +143,3 @@ class _CUSTOM_DECIMAL(SnowflakeType, sqltypes.DECIMAL):
@util.memoized_property
def _type_affinity(self):
return sqltypes.INTEGER if self.scale == 0 else sqltypes.DECIMAL


class _CUSTOM_Numeric(SnowflakeType, sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:

def process(value):
if value:
return decimal.Decimal(value)
else:
return None

return process
else:
return _process_float
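With `_CUSTOM_Numeric` and its unconditional `decimal.Decimal` result processor removed, numeric results now pass straight through the Snowflake connector, which returns Python `int` for scale-0 fixed-point columns and `decimal.Decimal` for scaled ones. A minimal sketch of the restored behavior, assuming a reachable Snowflake account; the connection URL and the `nums_demo` table are illustrative, not part of this commit:

import decimal

from sqlalchemy import MetaData, Table, create_engine, select

# Hypothetical connection URL; substitute real credentials/account.
engine = create_engine("snowflake://<user>:<password>@<account>/<db>/<schema>")

with engine.connect() as conn:
    # Temp table for the demo: one scale-0 integer column, one scaled numeric.
    conn.exec_driver_sql(
        "CREATE TEMP TABLE nums_demo (n_int INTEGER, n_dec NUMBER(12, 3))"
    )
    nums_demo = Table("nums_demo", MetaData(), autoload_with=conn)

    conn.execute(
        nums_demo.insert().values(n_int=42, n_dec=decimal.Decimal("1.250"))
    )
    row = conn.execute(select(nums_demo)).first()

    # With this fix, scale-0 numbers come back as plain Python ints again,
    # while scaled numerics are still returned as decimal.Decimal.
    assert isinstance(row[0], int)
    assert isinstance(row[1], decimal.Decimal)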
7 changes: 7 additions & 0 deletions src/snowflake/sqlalchemy/requirements.py
@@ -274,3 +274,10 @@ def json_type(self):
def implements_get_lastrowid(self):
# TODO: need connector lastrowid support, check SNOW-11155
return exclusions.closed()

@property
def implicit_decimal_binds(self):
# Supporting this would require a behavior-breaking change: implicitly converting str values to Decimal when
# binding parameters supplied as string forms of decimal values.
# Check https://snowflakecomputing.atlassian.net/browse/SNOW-640134 for details on breaking changes discussion.
return exclusions.closed()
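For context, `implicit_decimal_binds` is one of SQLAlchemy's test-suite requirement flags; keeping it closed skips the upstream tests that expect decimal values supplied in string form to be coerced to `Decimal` implicitly. A minimal sketch of the explicit pattern this dialect favors instead — the connection URL is illustrative and the round-trip shown is an assumption, not behavior asserted by this commit:

import decimal

from sqlalchemy import Numeric, create_engine, literal, select

# Hypothetical connection URL; substitute real credentials/account.
engine = create_engine("snowflake://<user>:<password>@<account>/<db>/<schema>")

expected = decimal.Decimal("15.756")

with engine.connect() as conn:
    # Bind a decimal.Decimal with an explicit Numeric type rather than relying on
    # implicit coercion of a string such as "15.756"; the value then round-trips
    # as a Decimal.
    value = conn.execute(select(literal(expected, Numeric(12, 3)))).scalar()
    assert value == expected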
3 changes: 0 additions & 3 deletions src/snowflake/sqlalchemy/snowdialect.py
@@ -34,7 +34,6 @@
Date,
DateTime,
Float,
Numeric,
Time,
)

@@ -60,7 +59,6 @@
_CUSTOM_Date,
_CUSTOM_DateTime,
_CUSTOM_Float,
_CUSTOM_Numeric,
_CUSTOM_Time,
)
from .util import _sort_columns_by_sequences
@@ -70,7 +68,6 @@
DateTime: _CUSTOM_DateTime,
Time: _CUSTOM_Time,
Float: _CUSTOM_Float,
Numeric: _CUSTOM_Numeric,
}

ischema_names = {
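`colspecs` is the standard SQLAlchemy dialect hook for substituting dialect-specific implementations for generic column types. With the `Numeric: _CUSTOM_Numeric` entry dropped, a generic `Numeric` now falls through to the stock SQLAlchemy type, leaving numeric conversion to the connector, while the remaining entries still adapt. A small sketch of how the mapping is consumed through `type_descriptor()`; the printed class names are expectations, not output captured from this commit:

from sqlalchemy import Float, Numeric

from snowflake.sqlalchemy.snowdialect import SnowflakeDialect

dialect = SnowflakeDialect()

# Float is still mapped through colspecs to the Snowflake-specific type...
adapted_float = dialect.type_descriptor(Float())
print(type(adapted_float).__name__)    # expected: _CUSTOM_Float

# ...while Numeric is no longer adapted, so the stock SQLAlchemy Numeric is used
# and result conversion is left to the Snowflake connector.
adapted_numeric = dialect.type_descriptor(Numeric(12, 3))
print(type(adapted_numeric).__name__)  # expected: Numeric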
147 changes: 140 additions & 7 deletions tests/test_core.py
@@ -1,15 +1,18 @@
#
# Copyright (c) 2012-2022 Snowflake Computing Inc. All rights reserved.
#

import decimal
import json
import os
import random
import re
import string
import time
from datetime import date, datetime
from unittest.mock import patch

import pytest
import pytz
from sqlalchemy import (
REAL,
Boolean,
@@ -44,6 +47,9 @@

THIS_DIR = os.path.dirname(os.path.realpath(__file__))

PST_TZ = "America/Los_Angeles"
JST_TZ = "Asia/Tokyo"


def _create_users_addresses_tables(
engine_testaccount, metadata, fk=None, pk=None, uq=None
@@ -1609,16 +1615,14 @@ def test_empty_comments(engine_testaccount):
def test_column_type_schema(engine_testaccount):
with engine_testaccount.connect() as conn:
table_name = random_string(5)
# column type FIXED not supported, triggers SQL compilation error: Unsupported data type 'FIXED'.
conn.exec_driver_sql(
f"""\
CREATE TEMP TABLE {table_name} (
C1 BIGINT, C2 BINARY, C3 BOOLEAN, C4 CHAR, C5 CHARACTER, C6 DATE, C7 DATETIME, C8 DEC,
C9 DECIMAL, C10 DOUBLE,
-- C11 FIXED, # SQL compilation error: Unsupported data type 'FIXED'.
C12 FLOAT, C13 INT, C14 INTEGER, C15 NUMBER, C16 REAL, C17 BYTEINT, C18 SMALLINT,
C19 STRING, C20 TEXT, C21 TIME, C22 TIMESTAMP, C23 TIMESTAMP_TZ, C24 TIMESTAMP_LTZ,
C25 TIMESTAMP_NTZ, C26 TINYINT, C27 VARBINARY, C28 VARCHAR, C29 VARIANT,
C30 OBJECT, C31 ARRAY, C32 GEOGRAPHY
C9 DECIMAL, C10 DOUBLE, C11 FLOAT, C12 INT, C13 INTEGER, C14 NUMBER, C15 REAL, C16 BYTEINT,
C17 SMALLINT, C18 STRING, C19 TEXT, C20 TIME, C21 TIMESTAMP, C22 TIMESTAMP_TZ, C23 TIMESTAMP_LTZ,
C24 TIMESTAMP_NTZ, C25 TINYINT, C26 VARBINARY, C27 VARCHAR, C28 VARIANT, C29 OBJECT, C30 ARRAY, C31 GEOGRAPHY
)
"""
)
@@ -1630,3 +1634,132 @@ def test_column_type_schema(engine_testaccount):
assert (
len(columns) == len(ischema_names_baseline) - 1
) # -1 because FIXED is not supported


def test_result_type_and_value(engine_testaccount):
with engine_testaccount.connect() as conn:
table_name = random_string(5)
conn.exec_driver_sql(
f"""\
CREATE TEMP TABLE {table_name} (
C1 BIGINT, C2 BINARY, C3 BOOLEAN, C4 CHAR, C5 CHARACTER, C6 DATE, C7 DATETIME, C8 DEC(12,3),
C9 DECIMAL(12,3), C10 DOUBLE, C11 FLOAT, C12 INT, C13 INTEGER, C14 NUMBER, C15 REAL, C16 BYTEINT,
C17 SMALLINT, C18 STRING, C19 TEXT, C20 TIME, C21 TIMESTAMP, C22 TIMESTAMP_TZ, C23 TIMESTAMP_LTZ,
C24 TIMESTAMP_NTZ, C25 TINYINT, C26 VARBINARY, C27 VARCHAR, C28 VARIANT, C29 OBJECT, C30 ARRAY, C31 GEOGRAPHY
)
"""
)
table_reflected = Table(
table_name, MetaData(), autoload=True, autoload_with=conn
)
current_date = date.today()
current_utctime = datetime.utcnow()
current_localtime = pytz.utc.localize(current_utctime, is_dst=False).astimezone(
pytz.timezone(PST_TZ)
)
current_localtime_without_tz = datetime.now()
current_localtime_with_other_tz = pytz.utc.localize(
current_localtime_without_tz, is_dst=False
).astimezone(pytz.timezone(JST_TZ))
TIME_VALUE = current_utctime.time()
DECIMAL_VALUE = decimal.Decimal("123456789.123")
MAX_INT_VALUE = 99999999999999999999999999999999999999
MIN_INT_VALUE = -99999999999999999999999999999999999999
FLOAT_VALUE = 123456789.123
STRING_VALUE = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
BINARY_VALUE = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
CHAR_VALUE = "A"
GEOGRAPHY_VALUE = "POINT(-122.35 37.55)"
GEOGRAPHY_RESULT_VALUE = '{"coordinates": [-122.35,37.55],"type": "Point"}'

ins = table_reflected.insert().values(
c1=MAX_INT_VALUE, # BIGINT
c2=BINARY_VALUE, # BINARY
c3=True, # BOOLEAN
c4=CHAR_VALUE, # CHAR
c5=CHAR_VALUE, # CHARACTER
c6=current_date, # DATE
c7=current_localtime_without_tz, # DATETIME
c8=DECIMAL_VALUE, # DEC(12,3)
c9=DECIMAL_VALUE, # DECIMAL(12,3)
c10=FLOAT_VALUE, # DOUBLE
c11=FLOAT_VALUE, # FLOAT
c12=MIN_INT_VALUE, # INT
c13=MAX_INT_VALUE, # INTEGER
c14=MIN_INT_VALUE, # NUMBER
c15=FLOAT_VALUE, # REAL
c16=MAX_INT_VALUE, # BYTEINT
c17=MIN_INT_VALUE, # SMALLINT
c18=STRING_VALUE, # STRING
c19=STRING_VALUE, # TEXT
c20=TIME_VALUE, # TIME
c21=current_utctime, # TIMESTAMP
c22=current_localtime_with_other_tz, # TIMESTAMP_TZ
c23=current_localtime, # TIMESTAMP_LTZ
c24=current_utctime, # TIMESTAMP_NTZ
c25=MAX_INT_VALUE, # TINYINT
c26=BINARY_VALUE, # VARBINARY
c27=STRING_VALUE, # VARCHAR
c28=None, # VARIANT, currently snowflake-sqlalchemy/connector does not support binding variant
c29=None, # OBJECT, currently snowflake-sqlalchemy/connector does not support binding variant
c30=None, # ARRAY, currently snowflake-sqlalchemy/connector does not support binding variant
c31=GEOGRAPHY_VALUE, # GEOGRAPHY
)
conn.execute(ins)

results = conn.execute(select(table_reflected)).fetchall()
assert len(results) == 1
result = results[0]
assert (
result[0] == MAX_INT_VALUE
and result[1] == BINARY_VALUE
and result[2] is True
and result[3] == CHAR_VALUE
and result[4] == CHAR_VALUE
and result[5] == current_date
and result[6] == current_localtime_without_tz
and result[7] == DECIMAL_VALUE
and result[8] == DECIMAL_VALUE
and result[9] == FLOAT_VALUE
and result[10] == FLOAT_VALUE
and result[11] == MIN_INT_VALUE
and result[12] == MAX_INT_VALUE
and result[13] == MIN_INT_VALUE
and result[14] == FLOAT_VALUE
and result[15] == MAX_INT_VALUE
and result[16] == MIN_INT_VALUE
and result[17] == STRING_VALUE
and result[18] == STRING_VALUE
and result[19] == TIME_VALUE
and result[20] == current_utctime
and result[21] == current_localtime_with_other_tz
and result[22] == current_localtime
and result[23] == current_utctime
and result[24] == MAX_INT_VALUE
and result[25] == BINARY_VALUE
and result[26] == STRING_VALUE
and result[27] is None
and result[28] is None
and result[29] is None
and json.loads(result[30]) == json.loads(GEOGRAPHY_RESULT_VALUE)
)

sql = f"""
INSERT INTO {table_name}(c28, c29, c30)
SELECT PARSE_JSON('{{"vk1":100, "vk2":200, "vk3":300}}'),
OBJECT_CONSTRUCT('vk1', 100, 'vk2', 200, 'vk3', 300),
PARSE_JSON('[
{{"k":1, "v":"str1"}},
{{"k":2, "v":"str2"}},
{{"k":3, "v":"str3"}}]'
)"""
conn.exec_driver_sql(sql)
results = conn.execute(select(table_reflected)).fetchall()
assert len(results) == 2
data = json.loads(results[-1][27])
assert json.loads(results[-1][28]) == data
assert data["vk1"] == 100
assert data["vk3"] == 300
assert data is not None
data = json.loads(results[-1][29])
assert data[1]["k"] == 2
