diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 2076aec5e1..cdd675cd9d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -413,7 +413,7 @@ jobs:
     steps:
       - name: Run MySQL 5.7
         run: |
-          docker run -e MYSQL_ROOT_PASSWORD=mysql -p 3306:3306 -d mysql:5.7 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin
+          docker run -e MYSQL_ROOT_PASSWORD=mysql -p 3306:3306 -d mysql:5.7 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --explicit-defaults-for-timestamp

       - uses: actions/checkout@v4
diff --git a/core/src/integration-test/java/com/scalar/db/storage/jdbc/ConsensusCommitAdminImportTableIntegrationTestWithJdbcDatabase.java b/core/src/integration-test/java/com/scalar/db/storage/jdbc/ConsensusCommitAdminImportTableIntegrationTestWithJdbcDatabase.java
index 925f667014..d589c140cc 100644
--- a/core/src/integration-test/java/com/scalar/db/storage/jdbc/ConsensusCommitAdminImportTableIntegrationTestWithJdbcDatabase.java
+++ b/core/src/integration-test/java/com/scalar/db/storage/jdbc/ConsensusCommitAdminImportTableIntegrationTestWithJdbcDatabase.java
@@ -1,10 +1,10 @@
 package com.scalar.db.storage.jdbc;

-import com.scalar.db.api.TableMetadata;
+import com.scalar.db.api.DistributedStorageAdminImportTableIntegrationTestBase.TestData;
 import com.scalar.db.exception.storage.ExecutionException;
 import com.scalar.db.transaction.consensuscommit.ConsensusCommitAdminImportTableIntegrationTestBase;
 import java.sql.SQLException;
-import java.util.Map;
+import java.util.List;
 import java.util.Properties;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.condition.DisabledIf;
@@ -44,8 +44,7 @@ public void afterAll() {
   }

   @Override
-  protected Map<String, TableMetadata> createExistingDatabaseWithAllDataTypes()
-      throws SQLException {
+  protected List<TestData> createExistingDatabaseWithAllDataTypes() throws SQLException {
     return testUtils.createExistingDatabaseWithAllDataTypes(getNamespace());
   }

diff --git a/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTableIntegrationTest.java b/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTableIntegrationTest.java
index 3055572108..3a27e8e633 100644
--- a/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTableIntegrationTest.java
+++ b/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTableIntegrationTest.java
@@ -1,10 +1,9 @@
 package com.scalar.db.storage.jdbc;

 import com.scalar.db.api.DistributedStorageAdminImportTableIntegrationTestBase;
-import com.scalar.db.api.TableMetadata;
 import com.scalar.db.exception.storage.ExecutionException;
 import java.sql.SQLException;
-import java.util.Map;
+import java.util.List;
 import java.util.Properties;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.condition.DisabledIf;
@@ -44,8 +43,7 @@ public void afterAll() {
   }

   @Override
-  protected Map<String, TableMetadata> createExistingDatabaseWithAllDataTypes()
-      throws SQLException {
+  protected List<TestData> createExistingDatabaseWithAllDataTypes() throws SQLException {
     return testUtils.createExistingDatabaseWithAllDataTypes(getNamespace());
   }

diff --git a/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTestUtils.java b/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTestUtils.java
index 65d03522bb..7feebb46c1 100644
--- a/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTestUtils.java
+++ b/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTestUtils.java
@@ -1,14 +1,39 @@
package com.scalar.db.storage.jdbc; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.scalar.db.api.DistributedStorageAdminImportTableIntegrationTestBase.TestData; +import com.scalar.db.api.Get; +import com.scalar.db.api.Insert; +import com.scalar.db.api.InsertBuilder; +import com.scalar.db.api.Put; +import com.scalar.db.api.PutBuilder; import com.scalar.db.api.TableMetadata; import com.scalar.db.config.DatabaseConfig; +import com.scalar.db.io.BigIntColumn; +import com.scalar.db.io.BlobColumn; +import com.scalar.db.io.BooleanColumn; +import com.scalar.db.io.Column; import com.scalar.db.io.DataType; +import com.scalar.db.io.DateColumn; +import com.scalar.db.io.DoubleColumn; +import com.scalar.db.io.FloatColumn; +import com.scalar.db.io.IntColumn; +import com.scalar.db.io.Key; +import com.scalar.db.io.TextColumn; +import com.scalar.db.io.TimeColumn; +import com.scalar.db.io.TimestampColumn; +import com.scalar.db.io.TimestampTZColumn; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.SQLException; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; @@ -16,6 +41,7 @@ import java.util.Properties; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nullable; import org.apache.commons.dbcp2.BasicDataSource; public class JdbcAdminImportTestUtils { @@ -24,16 +50,12 @@ public class JdbcAdminImportTestUtils { Arrays.asList( "BIGINT UNSIGNED", "BIT(8)", - "DATE", - "DATETIME", "DECIMAL(8,2)", "ENUM('a','b')", "SET('a','b')", "GEOMETRY", "JSON", // we remove this for MariaDB because it is an alias of a supported type, LONGTEXT "NUMERIC", - "TIME", - "TIMESTAMP", "YEAR"); static final List UNSUPPORTED_DATA_TYPES_PGSQL = Arrays.asList( @@ -43,7 +65,6 @@ public class JdbcAdminImportTestUtils { "box", "cidr", "circle", - "date", "inet", "interval", "json", @@ -60,10 +81,7 @@ public class JdbcAdminImportTestUtils { "polygon", "serial", "smallserial", - "time", "time with time zone", - "timestamp", - "timestamp with time zone", "tsquery", "tsvector", "txid_snapshot", @@ -74,34 +92,24 @@ public class JdbcAdminImportTestUtils { static final List UNSUPPORTED_DATA_TYPES_ORACLE = Arrays.asList( "BFILE", - "DATE", "FLOAT(54)", "INT", "INTERVAL YEAR(3) TO MONTH", "INTERVAL DAY(2) TO SECOND", "NUMBER(16,0)", "ROWID", - "TIMESTAMP", - "TIMESTAMP WITH TIME ZONE", - "TIMESTAMP WITH LOCAL TIME ZONE", "UROWID"); static final List UNSUPPORTED_DATA_TYPES_ORACLE_V20_OR_LATER = Collections.singletonList("JSON"); static final List UNSUPPORTED_DATA_TYPES_MSSQL = Arrays.asList( - "date", - "datetime", - "datetime2", - "datetimeoffset", "decimal(8,2)", "hierarchyid", "money", "numeric(8,2)", "rowversion", - "smalldatetime", "smallmoney", "sql_variant", - "time", "uniqueidentifier", "xml", "geometry", @@ -122,7 +130,7 @@ public JdbcAdminImportTestUtils(Properties properties) { @Override protected final void finalize() {} - public Map createExistingDatabaseWithAllDataTypes(String namespace) + public List createExistingDatabaseWithAllDataTypes(String namespace) throws SQLException { execute(rdbEngine.createSchemaSqls(namespace)); if (JdbcTestUtils.isMysql(rdbEngine)) { @@ -179,9 +187,14 @@ private LinkedHashMap prepareColumnsForMysql() 
{ columns.put("col18", "TINYBLOB"); columns.put("col19", "MEDIUMBLOB"); columns.put("col20", "LONGBLOB"); - columns.put("col21", "BINARY(255)"); + columns.put("col21", "BINARY(5)"); + columns.put("col22", "DATE"); + columns.put("col23", "TIME(6)"); + columns.put("col24", "DATETIME(6)"); + columns.put("col25", "DATETIME(6)"); // override to TIMESTAMPTZ + columns.put("col26", "TIMESTAMP(6)"); if (isMariaDB()) { - columns.put("col22", "JSON"); + columns.put("col27", "JSON"); } return columns; } @@ -212,14 +225,34 @@ private TableMetadata prepareTableMetadataForMysql() { .addColumn("col19", DataType.BLOB) .addColumn("col20", DataType.BLOB) .addColumn("col21", DataType.BLOB) + .addColumn("col22", DataType.DATE) + .addColumn("col23", DataType.TIME) + .addColumn("col24", DataType.TIMESTAMP) + .addColumn("col25", DataType.TIMESTAMPTZ) + .addColumn("col26", DataType.TIMESTAMPTZ) .addPartitionKey("pk1") .addPartitionKey("pk2"); if (isMariaDB()) { - builder.addColumn("col22", DataType.TEXT); + builder.addColumn("col27", DataType.TEXT); } return builder.build(); } + private Map> prepareInsertColumnsForMysql(TableMetadata metadata) { + ImmutableList.Builder> customColumns = new ImmutableList.Builder<>(); + customColumns.add( + TimestampTZColumn.of( + "col26", LocalDateTime.of(2005, 10, 11, 8, 35).toInstant(ZoneOffset.UTC))); + if (isMariaDB()) { + customColumns.add(TextColumn.of("col27", "{\"foo\": \"bar\"}")); + } + return prepareInsertColumns(metadata, customColumns.build()); + } + + private Map prepareOverrideColumnsTypeForMysql() { + return ImmutableMap.of("col25", DataType.TIMESTAMPTZ); + } + private LinkedHashMap prepareColumnsForPostgresql() { LinkedHashMap columns = new LinkedHashMap<>(); columns.put("pk1", "integer"); @@ -230,10 +263,14 @@ private LinkedHashMap prepareColumnsForPostgresql() { columns.put("col04", "bigint"); columns.put("col05", "real"); columns.put("col06", "double precision"); - columns.put("col07", "char(8)"); + columns.put("col07", "char(3)"); columns.put("col08", "varchar(512)"); columns.put("col09", "text"); columns.put("col10", "bytea"); + columns.put("col11", "date"); + columns.put("col12", "time"); + columns.put("col13", "timestamp"); + columns.put("col14", "timestamp with time zone"); return columns; } @@ -251,6 +288,10 @@ private TableMetadata prepareTableMetadataForPostgresql() { .addColumn("col08", DataType.TEXT) .addColumn("col09", DataType.TEXT) .addColumn("col10", DataType.BLOB) + .addColumn("col11", DataType.DATE) + .addColumn("col12", DataType.TIME) + .addColumn("col13", DataType.TIMESTAMP) + .addColumn("col14", DataType.TIMESTAMPTZ) .addPartitionKey("pk1") .addPartitionKey("pk2") .build(); @@ -258,25 +299,37 @@ private TableMetadata prepareTableMetadataForPostgresql() { private LinkedHashMap prepareColumnsForOracle() { LinkedHashMap columns = new LinkedHashMap<>(); - columns.put("pk1", "CHAR(8)"); - columns.put("pk2", "CHAR(8)"); + columns.put("pk1", "CHAR(3)"); + columns.put("pk2", "CHAR(3)"); columns.put("col01", "NUMERIC(15,0)"); columns.put("col02", "NUMERIC(15,2)"); columns.put("col03", "FLOAT(53)"); columns.put("col04", "BINARY_FLOAT"); columns.put("col05", "BINARY_DOUBLE"); - columns.put("col06", "CHAR(8)"); + columns.put("col06", "CHAR(3)"); columns.put("col07", "VARCHAR2(512)"); - columns.put("col08", "NCHAR(8)"); + columns.put("col08", "NCHAR(3)"); columns.put("col09", "NVARCHAR2(512)"); columns.put("col10", "CLOB"); columns.put("col11", "NCLOB"); columns.put("col12", "LONG"); columns.put("col13", "BLOB"); columns.put("col14", "RAW(1024)"); + 
columns.put("col15", "DATE"); + columns.put("col16", "DATE"); // override to TIME + columns.put("col17", "DATE"); // override to TIMESTAMP + columns.put("col18", "TIMESTAMP"); + columns.put("col19", "TIMESTAMP"); // override to TIME + columns.put("col20", "TIMESTAMP WITH TIME ZONE"); + columns.put("col21", "TIMESTAMP WITH LOCAL TIME ZONE"); return columns; } + private Map prepareOverrideColumnsTypeForOracle() { + return ImmutableMap.of( + "col16", DataType.TIME, "col17", DataType.TIMESTAMP, "col19", DataType.TIME); + } + private TableMetadata prepareTableMetadataForOracle() { return TableMetadata.newBuilder() .addColumn("pk1", DataType.TEXT) @@ -295,15 +348,30 @@ private TableMetadata prepareTableMetadataForOracle() { .addColumn("col12", DataType.TEXT) .addColumn("col13", DataType.BLOB) .addColumn("col14", DataType.BLOB) + .addColumn("col15", DataType.DATE) + .addColumn("col16", DataType.TIME) + .addColumn("col17", DataType.TIMESTAMP) + .addColumn("col18", DataType.TIMESTAMP) + .addColumn("col19", DataType.TIME) + .addColumn("col20", DataType.TIMESTAMPTZ) + .addColumn("col21", DataType.TIMESTAMPTZ) .addPartitionKey("pk1") .addPartitionKey("pk2") .build(); } + private Map> prepareInsertColumnsForOracle(TableMetadata metadata) { + List> customColumns = + ImmutableList.of( + TimeColumn.of("col16", LocalTime.of(11, 8, 35)), + TimestampColumn.of("col17", LocalDateTime.of(1905, 10, 11, 8, 35))); + return prepareInsertColumns(metadata, customColumns); + } + private LinkedHashMap prepareColumnsForOracleLongRaw() { LinkedHashMap columns = new LinkedHashMap<>(); - columns.put("pk1", "CHAR(8)"); - columns.put("pk2", "CHAR(8)"); + columns.put("pk1", "CHAR(3)"); + columns.put("pk2", "CHAR(3)"); columns.put("col", "LONG RAW"); return columns; } @@ -329,15 +397,22 @@ private LinkedHashMap prepareColumnsForSqlServer() { columns.put("col05", "bigint"); columns.put("col06", "real"); columns.put("col07", "float"); - columns.put("col08", "char(8)"); + columns.put("col08", "char(3)"); columns.put("col09", "varchar(512)"); - columns.put("col10", "nchar(8)"); + columns.put("col10", "nchar(3)"); columns.put("col11", "nvarchar(512)"); columns.put("col12", "text"); columns.put("col13", "ntext"); - columns.put("col14", "binary"); - columns.put("col15", "varbinary"); + columns.put("col14", "binary(5)"); + columns.put("col15", "varbinary(5)"); columns.put("col16", "image"); + columns.put("col17", "date"); + columns.put("col18", "time"); + columns.put("col19", "datetime"); + columns.put("col20", "datetime2"); + columns.put("col21", "smalldatetime"); + columns.put("col22", "datetimeoffset"); + return columns; } @@ -361,19 +436,35 @@ private TableMetadata prepareTableMetadataForSqlServer() { .addColumn("col14", DataType.BLOB) .addColumn("col15", DataType.BLOB) .addColumn("col16", DataType.BLOB) + .addColumn("col17", DataType.DATE) + .addColumn("col18", DataType.TIME) + .addColumn("col19", DataType.TIMESTAMP) + .addColumn("col20", DataType.TIMESTAMP) + .addColumn("col21", DataType.TIMESTAMP) + .addColumn("col22", DataType.TIMESTAMPTZ) .addPartitionKey("pk1") .addPartitionKey("pk2") .build(); } - private Map prepareCreateNonImportableTableSql( + private Map> prepareInsertColumnsForSqlServer(TableMetadata metadata) { + List> customColumns = + ImmutableList.of( + TimestampColumn.of("col19", LocalDateTime.of(1905, 10, 11, 8, 35, 14, 123_000_000)), + TimestampColumn.of("col21", LocalDateTime.of(1905, 10, 11, 8, 35))); + return prepareInsertColumns(metadata, customColumns); + } + + private List 
prepareCreateNonImportableTableSql( String namespace, List types) { - Map tables = new HashMap<>(); + ImmutableList.Builder data = new ImmutableList.Builder<>(); for (int i = 0; i < types.size(); i++) { String table = "bad_table" + i; - tables.put(table, prepareCreateNonImportableTableSql(namespace, table, types.get(i))); + data.add( + JdbcTestData.createUnsupportedTable( + table, prepareCreateNonImportableTableSql(namespace, table, types.get(i)))); } - return tables; + return data.build(); } private String prepareCreateNonImportableTableSql(String namespace, String table, String type) { @@ -400,51 +491,60 @@ private String prepareCreateTableSql( + "))"; } - private Map createExistingMysqlDatabaseWithAllDataTypes(String namespace) + private List createExistingMysqlDatabaseWithAllDataTypes(String namespace) throws SQLException { + List data = new ArrayList<>(); TableMetadata tableMetadata = prepareTableMetadataForMysql(); - Map supportedTables = - Collections.singletonMap( + String sql = + prepareCreateTableSql( + namespace, + SUPPORTED_TABLE_NAME, + prepareColumnsForMysql(), + tableMetadata.getPartitionKeyNames()); + data.add( + JdbcTestData.createSupportedTable( SUPPORTED_TABLE_NAME, - prepareCreateTableSql( - namespace, - SUPPORTED_TABLE_NAME, - prepareColumnsForMysql(), - tableMetadata.getPartitionKeyNames())); - Map supportedTableMetadata = - Collections.singletonMap(SUPPORTED_TABLE_NAME, tableMetadata); - - Map unsupportedTables; + sql, + tableMetadata, + prepareOverrideColumnsTypeForMysql(), + prepareInsertColumnsForMysql(tableMetadata))); + if (isMariaDB()) { - unsupportedTables = + data.addAll( prepareCreateNonImportableTableSql( namespace, UNSUPPORTED_DATA_TYPES_MYSQL.stream() .filter(type -> !type.equalsIgnoreCase("JSON")) - .collect(Collectors.toList())); + .collect(Collectors.toList()))); } else { - unsupportedTables = - prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_MYSQL); + data.addAll(prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_MYSQL)); } - return executeCreateTableSql(supportedTables, supportedTableMetadata, unsupportedTables); + executeCreateTableSql(data); + + return ImmutableList.copyOf(data); } - private Map createExistingPostgresDatabaseWithAllDataTypes( - String namespace) throws SQLException { + private List createExistingPostgresDatabaseWithAllDataTypes(String namespace) + throws SQLException { + List data = new ArrayList<>(); + TableMetadata tableMetadata = prepareTableMetadataForPostgresql(); - Map supportedTables = - Collections.singletonMap( + String sql = + prepareCreateTableSql( + namespace, + SUPPORTED_TABLE_NAME, + prepareColumnsForPostgresql(), + tableMetadata.getPartitionKeyNames()); + data.add( + JdbcTestData.createSupportedTable( SUPPORTED_TABLE_NAME, - prepareCreateTableSql( - namespace, - SUPPORTED_TABLE_NAME, - prepareColumnsForPostgresql(), - tableMetadata.getPartitionKeyNames())); - Map supportedTableMetadata = - Collections.singletonMap(SUPPORTED_TABLE_NAME, tableMetadata); - - Map unsupportedTables = + sql, + tableMetadata, + Collections.emptyMap(), + prepareInsertColumns(tableMetadata))); + + data.addAll( prepareCreateNonImportableTableSql( namespace, majorVersion >= 13 @@ -452,38 +552,50 @@ private Map createExistingPostgresDatabaseWithAllDataType UNSUPPORTED_DATA_TYPES_PGSQL.stream(), UNSUPPORTED_DATA_TYPES_PGSQL_V13_OR_LATER.stream()) .collect(Collectors.toList()) - : UNSUPPORTED_DATA_TYPES_PGSQL); + : UNSUPPORTED_DATA_TYPES_PGSQL)); + + executeCreateTableSql(data); - return 
executeCreateTableSql(supportedTables, supportedTableMetadata, unsupportedTables); + return ImmutableList.copyOf(data); } - private Map createExistingOracleDatabaseWithAllDataTypes(String namespace) + private List createExistingOracleDatabaseWithAllDataTypes(String namespace) throws SQLException { - Map supportedTables = new HashMap<>(); - Map supportedTableMetadata = new HashMap<>(); + List data = new ArrayList<>(); TableMetadata tableMetadata = prepareTableMetadataForOracle(); - supportedTables.put( - SUPPORTED_TABLE_NAME, + String sql = prepareCreateTableSql( namespace, SUPPORTED_TABLE_NAME, prepareColumnsForOracle(), - tableMetadata.getPartitionKeyNames())); - supportedTableMetadata.put(SUPPORTED_TABLE_NAME, tableMetadata); + tableMetadata.getPartitionKeyNames()); + data.add( + JdbcTestData.createSupportedTable( + SUPPORTED_TABLE_NAME, + sql, + tableMetadata, + prepareOverrideColumnsTypeForOracle(), + prepareInsertColumnsForOracle(tableMetadata))); // LONG columns must be tested with separated tables since they cannot be coexisted TableMetadata longRawTableMetadata = prepareTableMetadataForOracleForLongRaw(); - supportedTables.put( - SUPPORTED_TABLE_NAME + "_long_raw", + String longRawSupportedTable = SUPPORTED_TABLE_NAME + "_long_raw"; + String longRawSql = prepareCreateTableSql( namespace, - SUPPORTED_TABLE_NAME + "_long_raw", + longRawSupportedTable, prepareColumnsForOracleLongRaw(), - longRawTableMetadata.getPartitionKeyNames())); - supportedTableMetadata.put(SUPPORTED_TABLE_NAME + "_long_raw", longRawTableMetadata); - - Map unsupportedTables = + longRawTableMetadata.getPartitionKeyNames()); + data.add( + JdbcTestData.createSupportedTable( + longRawSupportedTable, + longRawSql, + longRawTableMetadata, + Collections.emptyMap(), + prepareInsertColumns(longRawTableMetadata))); + + data.addAll( prepareCreateNonImportableTableSql( namespace, majorVersion >= 20 @@ -491,55 +603,42 @@ private Map createExistingOracleDatabaseWithAllDataTypes( UNSUPPORTED_DATA_TYPES_ORACLE.stream(), UNSUPPORTED_DATA_TYPES_ORACLE_V20_OR_LATER.stream()) .collect(Collectors.toList()) - : UNSUPPORTED_DATA_TYPES_ORACLE); + : UNSUPPORTED_DATA_TYPES_ORACLE)); - return executeCreateTableSql(supportedTables, supportedTableMetadata, unsupportedTables); + executeCreateTableSql(data); + + return ImmutableList.copyOf(data); } - private Map createExistingSqlServerDatabaseWithAllDataTypes( - String namespace) throws SQLException { + private List createExistingSqlServerDatabaseWithAllDataTypes(String namespace) + throws SQLException { + List data = new ArrayList<>(); + TableMetadata tableMetadata = prepareTableMetadataForSqlServer(); - Map supportedTables = - Collections.singletonMap( + String sql = + prepareCreateTableSql( + namespace, SUPPORTED_TABLE_NAME, - prepareCreateTableSql( - namespace, - SUPPORTED_TABLE_NAME, - prepareColumnsForSqlServer(), - tableMetadata.getPartitionKeyNames())); - Map supportedTableMetadata = - Collections.singletonMap(SUPPORTED_TABLE_NAME, tableMetadata); - - Map unsupportedTables = - prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_MSSQL); - - return executeCreateTableSql(supportedTables, supportedTableMetadata, unsupportedTables); - } + prepareColumnsForSqlServer(), + tableMetadata.getPartitionKeyNames()); + data.add( + JdbcTestData.createSupportedTable( + SUPPORTED_TABLE_NAME, + sql, + tableMetadata, + Collections.emptyMap(), + prepareInsertColumnsForSqlServer(tableMetadata))); - private Map executeCreateTableSql( - Map supportedTables, - Map supportedTableMetadata, - 
Map unsupportedTables) - throws SQLException { - Map results = new HashMap<>(); - List sqls = new ArrayList<>(); + data.addAll(prepareCreateNonImportableTableSql(namespace, UNSUPPORTED_DATA_TYPES_MSSQL)); - // table with all supported columns - supportedTables.forEach( - (table, sql) -> { - sqls.add(sql); - results.put(table, supportedTableMetadata.get(table)); - }); + executeCreateTableSql(data); - // tables with an unsupported column - unsupportedTables.forEach( - (table, sql) -> { - sqls.add(sql); - results.put(table, null); - }); + return ImmutableList.copyOf(data); + } - execute(sqls.toArray(new String[0])); - return results; + private void executeCreateTableSql(List data) throws SQLException { + String[] sqls = data.stream().map(JdbcTestData::getCreateTableSql).toArray(String[]::new); + execute(sqls); } private boolean isMariaDB() { @@ -559,7 +658,178 @@ private int getMajorVersion() { } } + private Map> prepareInsertColumns( + TableMetadata tableMetadata, List> customColumns) { + Map> columnsByName = prepareInsertColumns(tableMetadata); + Map> customColumnsByName = + customColumns.stream().collect(Collectors.toMap(Column::getName, column -> column)); + columnsByName.putAll(customColumnsByName); + + return columnsByName; + } + + private Map> prepareInsertColumns(TableMetadata tableMetadata) { + return tableMetadata.getColumnNames().stream() + .map( + columnName -> + prepareGenericColumnValue(columnName, tableMetadata.getColumnDataType(columnName))) + .collect(Collectors.toMap(Column::getName, column -> column)); + } + + private Column prepareGenericColumnValue(String columnName, DataType columnType) { + switch (columnType) { + case INT: + return IntColumn.of(columnName, 1); + case TEXT: + return TextColumn.of(columnName, "foo"); + case BLOB: + return BlobColumn.of(columnName, "ABCDE".getBytes(StandardCharsets.UTF_8)); + case FLOAT: + return FloatColumn.of(columnName, 1.2F); + case DOUBLE: + return DoubleColumn.of(columnName, 4.23); + case BIGINT: + return BigIntColumn.of(columnName, 101); + case BOOLEAN: + return BooleanColumn.of(columnName, true); + case DATE: + return DateColumn.of(columnName, LocalDate.of(1003, 7, 14)); + case TIME: + return TimeColumn.of(columnName, LocalTime.of(5, 45, 33, 123_456_000)); + case TIMESTAMP: + return TimestampColumn.of(columnName, LocalDateTime.of(1003, 3, 2, 8, 35, 12, 123_000_000)); + case TIMESTAMPTZ: + return TimestampTZColumn.of( + columnName, + LocalDateTime.of(1003, 3, 2, 8, 35, 12, 123_000_000).toInstant(ZoneOffset.UTC)); + default: + throw new AssertionError(); + } + } + public void close() throws SQLException { dataSource.close(); } + + @SuppressWarnings("UseCorrectAssertInTests") + public static class JdbcTestData implements TestData { + + private final String tableName; + private final @Nullable Map overrideColumnsType; + private final @Nullable TableMetadata tableMetadata; + private final @Nullable Map> columns; + private final String createTableSql; + + private JdbcTestData( + String tableName, + String createTableSql, + @Nullable Map overrideColumnsType, + @Nullable TableMetadata tableMetadata, + @Nullable Map> columns) { + this.tableName = tableName; + this.createTableSql = createTableSql; + this.tableMetadata = tableMetadata; + this.overrideColumnsType = overrideColumnsType; + this.columns = columns; + } + + public static JdbcTestData createSupportedTable( + String tableName, + String createTableSql, + TableMetadata tableMetadata, + Map overrideColumnsType, + Map> columns) { + return new JdbcTestData( + tableName, createTableSql, 
overrideColumnsType, tableMetadata, columns); + } + + public static JdbcTestData createUnsupportedTable(String tableName, String createTableSql) { + return new JdbcTestData(tableName, createTableSql, null, null, null); + } + + @Override + public boolean isSupportedTable() { + return tableMetadata != null; + } + + @Override + public String getTableName() { + return tableName; + } + + private String getCreateTableSql() { + return createTableSql; + } + + @Override + public Map getOverrideColumnsType() { + assert overrideColumnsType != null; + return ImmutableMap.copyOf(overrideColumnsType); + } + + @Override + public TableMetadata getTableMetadata() { + assert tableMetadata != null; + return tableMetadata; + } + + @Override + public Insert getInsert(String namespace, String table) { + assert columns != null; + assert tableMetadata != null; + + InsertBuilder.Buildable insert = + Insert.newBuilder().namespace(namespace).table(table).partitionKey(preparePartitionKey()); + columns.forEach( + (name, column) -> { + if (!tableMetadata.getPartitionKeyNames().contains(name)) { + insert.value(column); + } + }); + + return insert.build(); + } + + @Override + public Put getPut(String namespace, String table) { + assert columns != null; + assert tableMetadata != null; + + PutBuilder.Buildable put = + Put.newBuilder().namespace(namespace).table(table).partitionKey(preparePartitionKey()); + columns.forEach( + (name, column) -> { + if (!tableMetadata.getPartitionKeyNames().contains(name)) { + put.value(column); + } + }); + + return put.build(); + } + + @Override + public Get getGet(String namespace, String table) { + assert columns != null; + + return Get.newBuilder() + .namespace(namespace) + .table(table) + .partitionKey(preparePartitionKey()) + .build(); + } + + private Key preparePartitionKey() { + assert tableMetadata != null; + Key.Builder key = Key.newBuilder(); + tableMetadata + .getPartitionKeyNames() + .forEach( + col -> { + assert columns != null; + key.add(columns.get(col)); + }); + + return key.build(); + } + } } diff --git a/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcSchemaLoaderImportIntegrationTest.java b/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcSchemaLoaderImportIntegrationTest.java index 77dda54b58..c16000eded 100644 --- a/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcSchemaLoaderImportIntegrationTest.java +++ b/core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcSchemaLoaderImportIntegrationTest.java @@ -1,9 +1,13 @@ package com.scalar.db.storage.jdbc; +import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.Uninterruptibles; +import com.scalar.db.api.TableMetadata; import com.scalar.db.config.DatabaseConfig; +import com.scalar.db.io.DataType; import com.scalar.db.schemaloader.SchemaLoaderImportIntegrationTestBase; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.util.Map; import java.util.Properties; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; @@ -12,6 +16,7 @@ import org.slf4j.LoggerFactory; public class JdbcSchemaLoaderImportIntegrationTest extends SchemaLoaderImportIntegrationTestBase { + private static final Logger logger = LoggerFactory.getLogger(JdbcSchemaLoaderImportIntegrationTest.class); @@ -31,21 +36,107 @@ protected Properties getProperties(String testName) { @SuppressFBWarnings("SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE") @Override protected void createImportableTable(String namespace, String table) throws Exception { - 
testUtils.execute( - "CREATE TABLE " - + rdbEngine.encloseFullTableName(namespace, table) - + "(" - + rdbEngine.enclose("pk") - + " CHAR(8)," - + rdbEngine.enclose("col") - + " CHAR(8), PRIMARY KEY(" - + rdbEngine.enclose("pk") - + "))"); + String sql; + + if (JdbcTestUtils.isMysql(rdbEngine)) { + sql = + "CREATE TABLE " + + rdbEngine.encloseFullTableName(namespace, table) + + "(" + + rdbEngine.enclose("pk") + + " CHAR(8)," + + rdbEngine.enclose("col1") + + " CHAR(8)," + + rdbEngine.enclose("col2") + + " DATETIME," + + "PRIMARY KEY(" + + rdbEngine.enclose("pk") + + "))"; + } else if (JdbcTestUtils.isOracle(rdbEngine)) { + sql = + "CREATE TABLE " + + rdbEngine.encloseFullTableName(namespace, table) + + "(" + + rdbEngine.enclose("pk") + + " CHAR(8)," + + rdbEngine.enclose("col1") + + " CHAR(8)," + + rdbEngine.enclose("col2") + + " DATE," + + "PRIMARY KEY(" + + rdbEngine.enclose("pk") + + "))"; + } else if (JdbcTestUtils.isPostgresql(rdbEngine) || JdbcTestUtils.isSqlServer(rdbEngine)) { + sql = + "CREATE TABLE " + + rdbEngine.encloseFullTableName(namespace, table) + + "(" + + rdbEngine.enclose("pk") + + " CHAR(8)," + + rdbEngine.enclose("col1") + + " CHAR(8)," + + "PRIMARY KEY(" + + rdbEngine.enclose("pk") + + "))"; + } else { + throw new AssertionError(); + } + + testUtils.execute(sql); + } + + @Override + protected Map getImportableTableOverrideColumnsType() { + // col1 type override confirms overriding with the default data type mapping does not fail + // col2 really performs a type override + if (JdbcTestUtils.isMysql(rdbEngine)) { + return ImmutableMap.of("col1", DataType.TEXT, "col2", DataType.TIMESTAMPTZ); + } else if (JdbcTestUtils.isOracle(rdbEngine)) { + return ImmutableMap.of("col1", DataType.TEXT, "col2", DataType.TIMESTAMP); + } else if (JdbcTestUtils.isPostgresql(rdbEngine) || JdbcTestUtils.isSqlServer(rdbEngine)) { + return ImmutableMap.of("col1", DataType.TEXT); + } else { + throw new AssertionError(); + } + } + + @Override + protected TableMetadata getImportableTableMetadata(boolean hasTypeOverride) { + TableMetadata.Builder metadata = TableMetadata.newBuilder(); + metadata.addPartitionKey("pk"); + metadata.addColumn("pk", DataType.TEXT); + metadata.addColumn("col1", DataType.TEXT); + + if (JdbcTestUtils.isMysql(rdbEngine)) { + return metadata + .addColumn("col2", hasTypeOverride ? DataType.TIMESTAMPTZ : DataType.TIMESTAMP) + .build(); + } else if (JdbcTestUtils.isOracle(rdbEngine)) { + return metadata + .addColumn("col2", hasTypeOverride ? 
DataType.TIMESTAMP : DataType.DATE) + .build(); + } else if (JdbcTestUtils.isPostgresql(rdbEngine) || JdbcTestUtils.isSqlServer(rdbEngine)) { + return metadata.build(); + } else { + throw new AssertionError(); + } } @SuppressFBWarnings("SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE") @Override protected void createNonImportableTable(String namespace, String table) throws Exception { + String nonImportableDataType; + if (JdbcTestUtils.isMysql(rdbEngine)) { + nonImportableDataType = "YEAR"; + } else if (JdbcTestUtils.isPostgresql(rdbEngine)) { + nonImportableDataType = "INTERVAL"; + } else if (JdbcTestUtils.isOracle(rdbEngine)) { + nonImportableDataType = "INT"; + } else if (JdbcTestUtils.isSqlServer(rdbEngine)) { + nonImportableDataType = "MONEY"; + } else { + throw new AssertionError(); + } testUtils.execute( "CREATE TABLE " + rdbEngine.encloseFullTableName(namespace, table) @@ -53,7 +144,9 @@ protected void createNonImportableTable(String namespace, String table) throws E + rdbEngine.enclose("pk") + " CHAR(8)," + rdbEngine.enclose("col") - + " DATE, PRIMARY KEY(" + + " " + + nonImportableDataType + + ", PRIMARY KEY(" + rdbEngine.enclose("pk") + "))"); } diff --git a/core/src/main/java/com/scalar/db/api/Admin.java b/core/src/main/java/com/scalar/db/api/Admin.java index 0bff210a17..58ef44a3b6 100644 --- a/core/src/main/java/com/scalar/db/api/Admin.java +++ b/core/src/main/java/com/scalar/db/api/Admin.java @@ -451,7 +451,28 @@ default void addNewColumnToTable( * table does not exist, or if the table does not meet the requirement of ScalarDB table * @throws ExecutionException if the operation fails */ - void importTable(String namespace, String table, Map options) + default void importTable(String namespace, String table, Map options) + throws ExecutionException { + importTable(namespace, table, options, Collections.emptyMap()); + } + + /** + * Imports an existing table that is not managed by ScalarDB. + * + * @param namespace an existing namespace + * @param table an existing table + * @param options options to import + * @param overrideColumnsType a map of a column data type by its name. Only set the column for + * which you want to override the default data type mapping. + * @throws IllegalArgumentException if the table is already managed by ScalarDB, if the target + * table does not exist, or if the table does not meet the requirement of ScalarDB table + * @throws ExecutionException if the operation fails + */ + void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException; /** diff --git a/core/src/main/java/com/scalar/db/api/DistributedStorageAdmin.java b/core/src/main/java/com/scalar/db/api/DistributedStorageAdmin.java index d7296a573f..abd7682497 100644 --- a/core/src/main/java/com/scalar/db/api/DistributedStorageAdmin.java +++ b/core/src/main/java/com/scalar/db/api/DistributedStorageAdmin.java @@ -2,6 +2,7 @@ import com.scalar.db.exception.storage.ExecutionException; import com.scalar.db.io.DataType; +import java.util.Map; /** * An administrative interface for distributed storage implementations. The user can execute @@ -47,12 +48,16 @@ public interface DistributedStorageAdmin extends Admin, AutoCloseable { * * @param namespace namespace name of import table * @param table import table name + * @param overrideColumnsType a map of a column data type by its name. Only set the column for + * which you want to override the default data type mapping. 
* @throws IllegalArgumentException if the table does not exist * @throws IllegalStateException if the table does not meet the requirement of ScalarDB table * @throws ExecutionException if the operation fails * @return import table metadata in the ScalarDB format */ - TableMetadata getImportTableMetadata(String namespace, String table) throws ExecutionException; + TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) + throws ExecutionException; /** * Add a column in the table without updating the metadata table in ScalarDB. diff --git a/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java b/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java index 1ebb2c25dc..462d02132d 100644 --- a/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java +++ b/core/src/main/java/com/scalar/db/common/CheckedDistributedStorageAdmin.java @@ -313,10 +313,11 @@ public Set getNamespaceNames() throws ExecutionException { } @Override - public TableMetadata getImportTableMetadata(String namespace, String table) + public TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) throws ExecutionException { try { - return admin.getImportTableMetadata(namespace, table); + return admin.getImportTableMetadata(namespace, table, overrideColumnsType); } catch (ExecutionException e) { throw new ExecutionException( CoreError.GETTING_IMPORT_TABLE_METADATA_FAILED.buildMessage( @@ -326,7 +327,11 @@ public TableMetadata getImportTableMetadata(String namespace, String table) } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { TableMetadata tableMetadata = getTableMetadata(namespace, table); if (tableMetadata != null) { @@ -336,7 +341,7 @@ public void importTable(String namespace, String table, Map opti } try { - admin.importTable(namespace, table, options); + admin.importTable(namespace, table, options, overrideColumnsType); } catch (ExecutionException e) { throw new ExecutionException( CoreError.IMPORTING_TABLE_FAILED.buildMessage( diff --git a/core/src/main/java/com/scalar/db/common/DecoratedDistributedTransactionAdmin.java b/core/src/main/java/com/scalar/db/common/DecoratedDistributedTransactionAdmin.java index c33eb1bbc4..16329e027f 100644 --- a/core/src/main/java/com/scalar/db/common/DecoratedDistributedTransactionAdmin.java +++ b/core/src/main/java/com/scalar/db/common/DecoratedDistributedTransactionAdmin.java @@ -196,9 +196,13 @@ public void addNewColumnToTable( } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { - distributedTransactionAdmin.importTable(namespace, table, options); + distributedTransactionAdmin.importTable(namespace, table, options, overrideColumnsType); } @Override diff --git a/core/src/main/java/com/scalar/db/common/error/CoreError.java b/core/src/main/java/com/scalar/db/common/error/CoreError.java index 43646da973..84a886b4e4 100644 --- a/core/src/main/java/com/scalar/db/common/error/CoreError.java +++ b/core/src/main/java/com/scalar/db/common/error/CoreError.java @@ -726,6 +726,12 @@ public enum CoreError implements ScalarDbError { "This TIMESTAMPTZ column value precision cannot be shorter than one millisecond. 
Value: %s", "", ""), + JDBC_IMPORT_DATA_TYPE_OVERRIDE_NOT_SUPPORTED( + Category.USER_ERROR, + "0158", + "The storage data type %s is not supported as ScalarDB %s data type: %s", + "", + ""), // // Errors for the concurrency error category diff --git a/core/src/main/java/com/scalar/db/service/AdminService.java b/core/src/main/java/com/scalar/db/service/AdminService.java index 46bf250049..a40181e67d 100644 --- a/core/src/main/java/com/scalar/db/service/AdminService.java +++ b/core/src/main/java/com/scalar/db/service/AdminService.java @@ -94,9 +94,10 @@ public void addNewColumnToTable( } @Override - public TableMetadata getImportTableMetadata(String namespace, String table) + public TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) throws ExecutionException { - return admin.getImportTableMetadata(namespace, table); + return admin.getImportTableMetadata(namespace, table, overrideColumnsType); } @Override @@ -107,9 +108,13 @@ public void addRawColumnToTable( } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { - admin.importTable(namespace, table, options); + admin.importTable(namespace, table, options, overrideColumnsType); } @Override diff --git a/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java b/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java index 26ac81f940..4dcb46c2e5 100644 --- a/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java +++ b/core/src/main/java/com/scalar/db/storage/cassandra/CassandraAdmin.java @@ -275,7 +275,8 @@ private TableMetadata createTableMetadata(com.datastax.driver.core.TableMetadata } @Override - public TableMetadata getImportTableMetadata(String namespace, String table) { + public TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) { throw new UnsupportedOperationException( CoreError.CASSANDRA_IMPORT_NOT_SUPPORTED.buildMessage()); } @@ -288,7 +289,11 @@ public void addRawColumnToTable( } @Override - public void importTable(String namespace, String table, Map options) { + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) { throw new UnsupportedOperationException( CoreError.CASSANDRA_IMPORT_NOT_SUPPORTED.buildMessage()); } diff --git a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java index 5f1962677d..8de1848bf5 100644 --- a/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java +++ b/core/src/main/java/com/scalar/db/storage/cosmos/CosmosAdmin.java @@ -636,7 +636,8 @@ columnName, getFullTableName(namespace, table)), } @Override - public TableMetadata getImportTableMetadata(String namespace, String table) { + public TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) { throw new UnsupportedOperationException(CoreError.COSMOS_IMPORT_NOT_SUPPORTED.buildMessage()); } @@ -647,7 +648,11 @@ public void addRawColumnToTable( } @Override - public void importTable(String namespace, String table, Map options) { + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) { throw new UnsupportedOperationException(CoreError.COSMOS_IMPORT_NOT_SUPPORTED.buildMessage()); } diff --git 
a/core/src/main/java/com/scalar/db/storage/dynamo/DynamoAdmin.java b/core/src/main/java/com/scalar/db/storage/dynamo/DynamoAdmin.java index 84982c07d9..9c9d8f39c5 100644 --- a/core/src/main/java/com/scalar/db/storage/dynamo/DynamoAdmin.java +++ b/core/src/main/java/com/scalar/db/storage/dynamo/DynamoAdmin.java @@ -1363,7 +1363,8 @@ columnName, getFullTableName(namespace, table)), } @Override - public TableMetadata getImportTableMetadata(String namespace, String table) { + public TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) { throw new UnsupportedOperationException( "Import-related functionality is not supported in DynamoDB"); } @@ -1376,7 +1377,11 @@ public void addRawColumnToTable( } @Override - public void importTable(String namespace, String table, Map options) { + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) { throw new UnsupportedOperationException( "Import-related functionality is not supported in DynamoDB"); } diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/AbstractRdbEngine.java b/core/src/main/java/com/scalar/db/storage/jdbc/AbstractRdbEngine.java new file mode 100644 index 0000000000..d52bbd00de --- /dev/null +++ b/core/src/main/java/com/scalar/db/storage/jdbc/AbstractRdbEngine.java @@ -0,0 +1,38 @@ +package com.scalar.db.storage.jdbc; + +import com.scalar.db.common.error.CoreError; +import com.scalar.db.io.DataType; +import java.sql.JDBCType; +import javax.annotation.Nullable; + +public abstract class AbstractRdbEngine implements RdbEngineStrategy { + + @Override + public final DataType getDataTypeForScalarDb( + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + @Nullable DataType overrideDataType) { + DataType dataType = + getDataTypeForScalarDbInternal( + type, typeName, columnSize, digits, columnDescription, overrideDataType); + + if (overrideDataType != null && overrideDataType != dataType) { + throw new IllegalArgumentException( + CoreError.JDBC_IMPORT_DATA_TYPE_OVERRIDE_NOT_SUPPORTED.buildMessage( + typeName, overrideDataType, columnDescription)); + } + + return dataType; + } + + abstract DataType getDataTypeForScalarDbInternal( + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + @Nullable DataType overrideDataType); +} diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java index 903ec00e31..4e4512c216 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcAdmin.java @@ -494,7 +494,8 @@ public TableMetadata getTableMetadata(String namespace, String table) throws Exe } @Override - public TableMetadata getImportTableMetadata(String namespace, String table) + public TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) throws ExecutionException { TableMetadata.Builder builder = TableMetadata.newBuilder(); boolean primaryKeyExists = false; @@ -530,14 +531,16 @@ public TableMetadata getImportTableMetadata(String namespace, String table) resultSet = metadata.getColumns(catalogName, schemaName, table, "%"); while (resultSet.next()) { String columnName = resultSet.getString(JDBC_COL_COLUMN_NAME); - builder.addColumn( - columnName, + DataType overrideDataType = overrideColumnsType.get(columnName); + DataType dataType = rdbEngine.getDataTypeForScalarDb( 
getJdbcType(resultSet.getInt(JDBC_COL_DATA_TYPE)), resultSet.getString(JDBC_COL_TYPE_NAME), resultSet.getInt(JDBC_COL_COLUMN_SIZE), resultSet.getInt(JDBC_COL_DECIMAL_DIGITS), - getFullTableName(namespace, table) + " " + columnName)); + getFullTableName(namespace, table) + " " + columnName, + overrideDataType); + builder.addColumn(columnName, dataType); } } catch (SQLException e) { throw new ExecutionException( @@ -551,10 +554,14 @@ public TableMetadata getImportTableMetadata(String namespace, String table) } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { try (Connection connection = dataSource.getConnection()) { - TableMetadata tableMetadata = getImportTableMetadata(namespace, table); + TableMetadata tableMetadata = getImportTableMetadata(namespace, table, overrideColumnsType); createNamespacesTableIfNotExists(connection); upsertIntoNamespacesTable(connection, namespace); addTableMetadata(connection, namespace, table, tableMetadata, true, false); diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcUtils.java b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcUtils.java index 376fc811f3..190bfc3330 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/JdbcUtils.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/JdbcUtils.java @@ -5,8 +5,12 @@ import java.sql.JDBCType; import java.util.Map.Entry; import org.apache.commons.dbcp2.BasicDataSource; +import org.slf4j.LoggerFactory; public final class JdbcUtils { + + private static final org.slf4j.Logger log = LoggerFactory.getLogger(JdbcUtils.class); + private JdbcUtils() {} public static BasicDataSource initDataSource(JdbcConfig config, RdbEngineStrategy rdbEngine) { diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMariaDB.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMariaDB.java index 9c3664feba..02913ed0b6 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMariaDB.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMariaDB.java @@ -3,6 +3,7 @@ import com.scalar.db.io.DataType; import java.sql.Driver; import java.sql.JDBCType; +import javax.annotation.Nullable; class RdbEngineMariaDB extends RdbEngineMysql { @Override @@ -11,14 +12,20 @@ public Driver getDriver() { } @Override - public DataType getDataTypeForScalarDb( - JDBCType type, String typeName, int columnSize, int digits, String columnDescription) { + DataType getDataTypeForScalarDbInternal( + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + @Nullable DataType overrideDataType) { if (type == JDBCType.BOOLEAN) { // MariaDB JDBC driver maps TINYINT(1) type as a BOOLEAN JDBC type which differs from the // MySQL driver which maps it to a BIT type. 
return DataType.BOOLEAN; } else { - return super.getDataTypeForScalarDb(type, typeName, columnSize, digits, columnDescription); + return super.getDataTypeForScalarDbInternal( + type, typeName, columnSize, digits, columnDescription, overrideDataType); } } } diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java index bf0baacaac..b9aed27bb5 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineMysql.java @@ -26,7 +26,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -class RdbEngineMysql implements RdbEngineStrategy { +class RdbEngineMysql extends AbstractRdbEngine { private static final Logger logger = LoggerFactory.getLogger(RdbEngineMysql.class); private final String keyColumnSize; private final RdbEngineTimeTypeMysql timeTypeEngine; @@ -237,8 +237,13 @@ public String getDataTypeForKey(DataType dataType) { } @Override - public DataType getDataTypeForScalarDb( - JDBCType type, String typeName, int columnSize, int digits, String columnDescription) { + DataType getDataTypeForScalarDbInternal( + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + @Nullable DataType overrideDataType) { switch (type) { case BIT: if (columnSize != 1) { @@ -310,6 +315,21 @@ public DataType getDataTypeForScalarDb( typeName); } return DataType.BLOB; + case DATE: + if (typeName.equalsIgnoreCase("YEAR")) { + throw new IllegalArgumentException( + CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( + typeName, columnDescription)); + } + return DataType.DATE; + case TIME: + return DataType.TIME; + // Both MySQL TIMESTAMP and DATETIME data types are mapped to the TIMESTAMP JDBC type + case TIMESTAMP: + if (overrideDataType == DataType.TIMESTAMPTZ || typeName.equalsIgnoreCase("TIMESTAMP")) { + return DataType.TIMESTAMPTZ; + } + return DataType.TIMESTAMP; default: throw new IllegalArgumentException( CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java index 49bec0f09d..7efef668ca 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineOracle.java @@ -22,10 +22,11 @@ import java.util.ArrayList; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -class RdbEngineOracle implements RdbEngineStrategy { +class RdbEngineOracle extends AbstractRdbEngine { private static final Logger logger = LoggerFactory.getLogger(RdbEngineOracle.class); private final String keyColumnSize; private final RdbEngineTimeTypeOracle timeTypeEngine; @@ -242,8 +243,13 @@ public String getDataTypeForKey(DataType dataType) { } @Override - public DataType getDataTypeForScalarDb( - JDBCType type, String typeName, int columnSize, int digits, String columnDescription) { + DataType getDataTypeForScalarDbInternal( + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + @Nullable DataType overrideDataType) { String numericTypeDescription = String.format("%s(%d, %d)", typeName, columnSize, digits); switch (type) { case NUMERIC: @@ -307,6 +313,29 @@ public DataType getDataTypeForScalarDb( columnDescription, typeName); return DataType.BLOB; + 
case TIMESTAMP: + // handles "date" type + if (typeName.equalsIgnoreCase("date")) { + if (overrideDataType == DataType.TIME) { + return DataType.TIME; + } + if (overrideDataType == DataType.TIMESTAMP) { + return DataType.TIMESTAMP; + } + return DataType.DATE; + } + // handles "timestamp" type + if (overrideDataType == DataType.TIME) { + return DataType.TIME; + } + return DataType.TIMESTAMP; + case OTHER: + if (typeName.toLowerCase().endsWith("time zone")) { + return DataType.TIMESTAMPTZ; + } + throw new IllegalArgumentException( + CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( + typeName, columnDescription)); default: throw new IllegalArgumentException( CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java index 7cdbca69e3..8ab6c02958 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEnginePostgresql.java @@ -21,10 +21,11 @@ import java.util.ArrayList; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -class RdbEnginePostgresql implements RdbEngineStrategy { +class RdbEnginePostgresql extends AbstractRdbEngine { private static final Logger logger = LoggerFactory.getLogger(RdbEnginePostgresql.class); private final RdbEngineTimeTypePostgresql timeTypeEngine; @@ -230,8 +231,13 @@ public String getDataTypeForKey(DataType dataType) { } @Override - public DataType getDataTypeForScalarDb( - JDBCType type, String typeName, int columnSize, int digits, String columnDescription) { + DataType getDataTypeForScalarDbInternal( + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + @Nullable DataType overrideDataType) { switch (type) { case BIT: if (columnSize != 1) { @@ -289,6 +295,20 @@ public DataType getDataTypeForScalarDb( return DataType.TEXT; case BINARY: return DataType.BLOB; + case DATE: + return DataType.DATE; + case TIME: + if (typeName.equalsIgnoreCase("timetz")) { + throw new IllegalArgumentException( + CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( + typeName, columnDescription)); + } + return DataType.TIME; + case TIMESTAMP: + if (typeName.equalsIgnoreCase("timestamptz")) { + return DataType.TIMESTAMPTZ; + } + return DataType.TIMESTAMP; default: throw new IllegalArgumentException( CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java index 2630147bc9..4714d6d2f6 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlServer.java @@ -22,7 +22,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -class RdbEngineSqlServer implements RdbEngineStrategy { +class RdbEngineSqlServer extends AbstractRdbEngine { private static final Logger logger = LoggerFactory.getLogger(RdbEngineSqlServer.class); private final RdbEngineTimeTypeSqlServer timeTypeEngine; @@ -206,8 +206,13 @@ public String getDataTypeForKey(DataType dataType) { } @Override - public DataType getDataTypeForScalarDb( - JDBCType type, String typeName, int columnSize, int digits, String columnDescription) { + DataType getDataTypeForScalarDbInternal( + JDBCType 
type, + String typeName, + int columnSize, + int digits, + String columnDescription, + DataType overrideDataType) { switch (type) { case BIT: if (columnSize != 1) { @@ -273,6 +278,19 @@ public DataType getDataTypeForScalarDb( return DataType.BLOB; case LONGVARBINARY: return DataType.BLOB; + case DATE: + return DataType.DATE; + case TIME: + return DataType.TIME; + case TIMESTAMP: + return DataType.TIMESTAMP; + case OTHER: + if (!typeName.equalsIgnoreCase("datetimeoffset")) { + throw new IllegalArgumentException( + CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( + typeName, columnDescription)); + } + return DataType.TIMESTAMPTZ; default: throw new IllegalArgumentException( CoreError.JDBC_IMPORT_DATA_TYPE_NOT_SUPPORTED.buildMessage( @@ -304,7 +322,7 @@ public int getSqlTypes(DataType dataType) { case TIMESTAMP: return Types.TIMESTAMP; case TIMESTAMPTZ: - return Types.TIMESTAMP_WITH_TIMEZONE; + return microsoft.sql.Types.DATETIMEOFFSET; default: throw new AssertionError(); } diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlite.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlite.java index 42d37ed9bf..fb436751c4 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlite.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineSqlite.java @@ -35,7 +35,7 @@ * native SQLite library in JAR, we should assure the real error messages in * RdbEngineStrategyTest. */ -class RdbEngineSqlite implements RdbEngineStrategy { +class RdbEngineSqlite extends AbstractRdbEngine { private static final String NAMESPACE_SEPARATOR = "$"; private final RdbEngineTimeTypeSqlite timeTypeEngine; @@ -152,8 +152,13 @@ public String getTextType(int charLength) { } @Override - public DataType getDataTypeForScalarDb( - JDBCType type, String typeName, int columnSize, int digits, String columnDescription) { + public DataType getDataTypeForScalarDbInternal( + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + DataType overrideDataType) { throw new AssertionError("SQLite is not supported"); } diff --git a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineStrategy.java b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineStrategy.java index d4761f3b57..6dc20be379 100644 --- a/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineStrategy.java +++ b/core/src/main/java/com/scalar/db/storage/jdbc/RdbEngineStrategy.java @@ -42,7 +42,12 @@ public interface RdbEngineStrategy { String getDataTypeForKey(DataType dataType); DataType getDataTypeForScalarDb( - JDBCType type, String typeName, int columnSize, int digits, String columnDescription); + JDBCType type, + String typeName, + int columnSize, + int digits, + String columnDescription, + @Nullable DataType overrideDataType); int getSqlTypes(DataType dataType); diff --git a/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageAdmin.java b/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageAdmin.java index 3196bce219..a1443e1999 100644 --- a/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageAdmin.java +++ b/core/src/main/java/com/scalar/db/storage/multistorage/MultiStorageAdmin.java @@ -196,9 +196,10 @@ public void addNewColumnToTable( } @Override - public TableMetadata getImportTableMetadata(String namespace, String table) + public TableMetadata getImportTableMetadata( + String namespace, String table, Map overrideColumnsType) throws ExecutionException { - return getAdmin(namespace, 
table).getImportTableMetadata(namespace, table); + return getAdmin(namespace, table).getImportTableMetadata(namespace, table, overrideColumnsType); } @Override @@ -209,9 +210,13 @@ public void addRawColumnToTable( } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { - getAdmin(namespace, table).importTable(namespace, table, options); + getAdmin(namespace, table).importTable(namespace, table, options, overrideColumnsType); } @Override diff --git a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java index 2661d36773..8d86be812e 100644 --- a/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java +++ b/core/src/main/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdmin.java @@ -246,7 +246,11 @@ public Set getNamespaceNames() throws ExecutionException { } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { checkNamespace(namespace); @@ -256,7 +260,7 @@ public void importTable(String namespace, String table, Map opti CoreError.TABLE_ALREADY_EXISTS.buildMessage( ScalarDbUtils.getFullTableName(namespace, table))); } - tableMetadata = admin.getImportTableMetadata(namespace, table); + tableMetadata = admin.getImportTableMetadata(namespace, table, overrideColumnsType); // add transaction metadata columns for (Map.Entry entry : diff --git a/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdmin.java b/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdmin.java index 5bf7f0a6c7..76f060903e 100644 --- a/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdmin.java +++ b/core/src/main/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdmin.java @@ -86,9 +86,13 @@ public boolean namespaceExists(String namespace) throws ExecutionException { } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { - jdbcAdmin.importTable(namespace, table, options); + jdbcAdmin.importTable(namespace, table, options, overrideColumnsType); } /** diff --git a/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdmin.java b/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdmin.java index 36907e8669..a941639f1f 100644 --- a/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdmin.java +++ b/core/src/main/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdmin.java @@ -86,9 +86,13 @@ public boolean namespaceExists(String namespace) throws ExecutionException { } @Override - public void importTable(String namespace, String table, Map options) + public void importTable( + String namespace, + String table, + Map options, + Map overrideColumnsType) throws ExecutionException { - distributedStorageAdmin.importTable(namespace, table, options); + distributedStorageAdmin.importTable(namespace, table, options, overrideColumnsType); } @Override diff --git 
a/core/src/test/java/com/scalar/db/storage/cassandra/CassandraAdminTest.java b/core/src/test/java/com/scalar/db/storage/cassandra/CassandraAdminTest.java index 987a216842..5a10d322a0 100644 --- a/core/src/test/java/com/scalar/db/storage/cassandra/CassandraAdminTest.java +++ b/core/src/test/java/com/scalar/db/storage/cassandra/CassandraAdminTest.java @@ -879,12 +879,16 @@ public void unsupportedOperations_ShouldThrowUnsupportedException() { // Act Throwable thrown1 = - catchThrowable(() -> cassandraAdmin.getImportTableMetadata(namespace, table)); + catchThrowable( + () -> cassandraAdmin.getImportTableMetadata(namespace, table, ImmutableMap.of())); Throwable thrown2 = catchThrowable( () -> cassandraAdmin.addRawColumnToTable(namespace, table, column, DataType.INT)); Throwable thrown3 = - catchThrowable(() -> cassandraAdmin.importTable(namespace, table, Collections.emptyMap())); + catchThrowable( + () -> + cassandraAdmin.importTable( + namespace, table, Collections.emptyMap(), Collections.emptyMap())); // Assert assertThat(thrown1).isInstanceOf(UnsupportedOperationException.class); diff --git a/core/src/test/java/com/scalar/db/storage/cosmos/CosmosAdminTest.java b/core/src/test/java/com/scalar/db/storage/cosmos/CosmosAdminTest.java index 9dc24dac5a..bfeb562889 100644 --- a/core/src/test/java/com/scalar/db/storage/cosmos/CosmosAdminTest.java +++ b/core/src/test/java/com/scalar/db/storage/cosmos/CosmosAdminTest.java @@ -1075,11 +1075,16 @@ public void unsupportedOperations_ShouldThrowUnsupportedException() { String column = "col"; // Act - Throwable thrown1 = catchThrowable(() -> admin.getImportTableMetadata(namespace, table)); + Throwable thrown1 = + catchThrowable( + () -> admin.getImportTableMetadata(namespace, table, Collections.emptyMap())); Throwable thrown2 = catchThrowable(() -> admin.addRawColumnToTable(namespace, table, column, DataType.INT)); Throwable thrown3 = - catchThrowable(() -> admin.importTable(namespace, table, Collections.emptyMap())); + catchThrowable( + () -> + admin.importTable( + namespace, table, Collections.emptyMap(), Collections.emptyMap())); // Assert assertThat(thrown1).isInstanceOf(UnsupportedOperationException.class); diff --git a/core/src/test/java/com/scalar/db/storage/dynamo/DynamoAdminTestBase.java b/core/src/test/java/com/scalar/db/storage/dynamo/DynamoAdminTestBase.java index 7e17c4d8d8..c2072e9445 100644 --- a/core/src/test/java/com/scalar/db/storage/dynamo/DynamoAdminTestBase.java +++ b/core/src/test/java/com/scalar/db/storage/dynamo/DynamoAdminTestBase.java @@ -1595,11 +1595,16 @@ public void getNamespacesNames_WithNonExistingNamespacesTable_ShouldReturnEmptyS @Test public void unsupportedOperations_ShouldThrowUnsupportedException() { // Arrange Act - Throwable thrown1 = catchThrowable(() -> admin.getImportTableMetadata(NAMESPACE, TABLE)); + Throwable thrown1 = + catchThrowable( + () -> admin.getImportTableMetadata(NAMESPACE, TABLE, Collections.emptyMap())); Throwable thrown2 = catchThrowable(() -> admin.addRawColumnToTable(NAMESPACE, TABLE, "c1", DataType.INT)); Throwable thrown3 = - catchThrowable(() -> admin.importTable(NAMESPACE, TABLE, Collections.emptyMap())); + catchThrowable( + () -> + admin.importTable( + NAMESPACE, TABLE, Collections.emptyMap(), Collections.emptyMap())); // Assert assertThat(thrown1).isInstanceOf(UnsupportedOperationException.class); diff --git a/core/src/test/java/com/scalar/db/storage/jdbc/JdbcAdminTest.java b/core/src/test/java/com/scalar/db/storage/jdbc/JdbcAdminTest.java index 9e6c846532..104140a962 100644 --- 
a/core/src/test/java/com/scalar/db/storage/jdbc/JdbcAdminTest.java +++ b/core/src/test/java/com/scalar/db/storage/jdbc/JdbcAdminTest.java @@ -12,7 +12,10 @@ import static org.assertj.core.api.Assertions.catchThrowable; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.description; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; @@ -35,6 +38,7 @@ import com.scalar.db.io.DataType; import java.sql.Connection; import java.sql.DatabaseMetaData; +import java.sql.JDBCType; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; @@ -70,7 +74,7 @@ public class JdbcAdminTest { private static final String NAMESPACE = "namespace"; private static final String TABLE = "table"; private static final String COLUMN_1 = "c1"; - private static final ImmutableMap RDB_ENGINES = + public static final ImmutableMap RDB_ENGINES = ImmutableMap.of( RdbEngine.MYSQL, new RdbEngineMysql(), @@ -2687,7 +2691,7 @@ public Boolean answer(InvocationOnMock invocation) { .thenReturn(""); when(columnResults.getInt(JDBC_COL_COLUMN_SIZE)).thenReturn(0).thenReturn(0).thenReturn(0); when(columnResults.getInt(JDBC_COL_DECIMAL_DIGITS)).thenReturn(0).thenReturn(0).thenReturn(0); - RdbEngineStrategy rdbEngineStrategy = getRdbEngineStrategy(rdbEngine); + RdbEngineStrategy rdbEngineStrategy = spy(getRdbEngineStrategy(rdbEngine)); if (rdbEngineStrategy instanceof RdbEngineMysql) { when(metadata.getPrimaryKeys(NAMESPACE, NAMESPACE, TABLE)).thenReturn(primaryKeyResults); when(metadata.getColumns(NAMESPACE, NAMESPACE, TABLE, "%")).thenReturn(columnResults); @@ -2700,18 +2704,42 @@ public Boolean answer(InvocationOnMock invocation) { expectedColumns.put("pk1", DataType.TEXT); expectedColumns.put("pk2", DataType.TEXT); expectedColumns.put("col", DataType.FLOAT); - - JdbcAdmin admin = createJdbcAdminFor(rdbEngine); + JdbcAdmin admin = createJdbcAdminFor(rdbEngineStrategy); String description = "database engine specific test failed: " + rdbEngine; + Map overrideColumnsType = ImmutableMap.of("col", DataType.FLOAT); // Act - TableMetadata actual = admin.getImportTableMetadata(NAMESPACE, TABLE); + TableMetadata actual = admin.getImportTableMetadata(NAMESPACE, TABLE, overrideColumnsType); // Assert verify(checkTableExistStatement, description(description)) .execute(expectedCheckTableExistStatement); assertThat(actual.getPartitionKeyNames()).hasSameElementsAs(ImmutableSet.of("pk1", "pk2")); assertThat(actual.getColumnDataTypes()).containsExactlyEntriesOf(expectedColumns); + verify(rdbEngineStrategy) + .getDataTypeForScalarDb( + any(JDBCType.class), + anyString(), + anyInt(), + anyInt(), + eq(getFullTableName(NAMESPACE, TABLE) + " pk1"), + eq(null)); + verify(rdbEngineStrategy) + .getDataTypeForScalarDb( + any(JDBCType.class), + anyString(), + anyInt(), + anyInt(), + eq(getFullTableName(NAMESPACE, TABLE) + " pk2"), + eq(null)); + verify(rdbEngineStrategy) + .getDataTypeForScalarDb( + any(JDBCType.class), + anyString(), + anyInt(), + anyInt(), + eq(getFullTableName(NAMESPACE, TABLE) + " col"), + eq(DataType.FLOAT)); } private void getImportTableMetadata_ForSQLite_ShouldThrowUnsupportedOperationException( @@ -2720,7 +2748,8 @@ private void 
getImportTableMetadata_ForSQLite_ShouldThrowUnsupportedOperationExc JdbcAdmin admin = createJdbcAdminFor(rdbEngine); // Act Assert - assertThatThrownBy(() -> admin.getImportTableMetadata(NAMESPACE, TABLE)) + // TODO Check this change + assertThatThrownBy(() -> admin.getImportTableMetadata(NAMESPACE, TABLE, Collections.emptyMap())) .isInstanceOf(UnsupportedOperationException.class); } @@ -2759,7 +2788,8 @@ private void getImportTableMetadata_WithNonExistingTableForX_ShouldThrowIllegalA when(checkTableExistStatement.execute(any())).thenThrow(sqlException); // Act Assert - assertThatThrownBy(() -> admin.getImportTableMetadata(NAMESPACE, TABLE)) + // TODO Check this change + assertThatThrownBy(() -> admin.getImportTableMetadata(NAMESPACE, TABLE, Collections.emptyMap())) .isInstanceOf(IllegalArgumentException.class); verify( checkTableExistStatement, @@ -2788,7 +2818,9 @@ private void getImportTableMetadata_PrimaryKeyNotExistsForX_ShouldThrowIllegalSt String description = "database engine specific test failed: " + rdbEngine; // Act - Throwable thrown = catchThrowable(() -> admin.getImportTableMetadata(NAMESPACE, TABLE)); + Throwable thrown = + catchThrowable( + () -> admin.getImportTableMetadata(NAMESPACE, TABLE, Collections.emptyMap())); // Assert verify(checkTableExistStatement, description(description)) @@ -2821,8 +2853,8 @@ private void getImportTableMetadata_UnsupportedDataTypeGivenForX_ShouldThrowExec when(primaryKeyResults.getString(JDBC_COL_COLUMN_NAME)).thenReturn("pk1"); when(columnResults.next()).thenReturn(true).thenReturn(false); when(columnResults.getString(JDBC_COL_COLUMN_NAME)).thenReturn("pk1"); - when(columnResults.getInt(JDBC_COL_DATA_TYPE)).thenReturn(Types.TIMESTAMP); - when(columnResults.getString(JDBC_COL_TYPE_NAME)).thenReturn("timestamp"); + when(columnResults.getInt(JDBC_COL_DATA_TYPE)).thenReturn(Types.OTHER); + when(columnResults.getString(JDBC_COL_TYPE_NAME)).thenReturn("any_unsupported_type"); when(columnResults.getInt(JDBC_COL_COLUMN_SIZE)).thenReturn(0); when(columnResults.getInt(JDBC_COL_DECIMAL_DIGITS)).thenReturn(0); @@ -2839,7 +2871,9 @@ private void getImportTableMetadata_UnsupportedDataTypeGivenForX_ShouldThrowExec String description = "database engine specific test failed: " + rdbEngine; // Act - Throwable thrown = catchThrowable(() -> admin.getImportTableMetadata(NAMESPACE, TABLE)); + Throwable thrown = + catchThrowable( + () -> admin.getImportTableMetadata(NAMESPACE, TABLE, Collections.emptyMap())); // Assert verify(checkTableExistStatement, description(description)) @@ -2931,7 +2965,10 @@ public void importTable_ForXBesidesSqlite_ShouldWorkProperly(RdbEngine rdbEngine when(dataSource.getConnection()).thenReturn(connection); TableMetadata importedTableMetadata = mock(TableMetadata.class); - doReturn(importedTableMetadata).when(adminSpy).getImportTableMetadata(anyString(), anyString()); + // TODO Check this change + doReturn(importedTableMetadata) + .when(adminSpy) + .getImportTableMetadata(anyString(), anyString(), anyMap()); doNothing().when(adminSpy).createNamespacesTableIfNotExists(connection); doNothing().when(adminSpy).upsertIntoNamespacesTable(any(), anyString()); doNothing() @@ -2942,7 +2979,8 @@ public void importTable_ForXBesidesSqlite_ShouldWorkProperly(RdbEngine rdbEngine adminSpy.importTable(NAMESPACE, TABLE, Collections.emptyMap()); // Assert - verify(adminSpy).getImportTableMetadata(NAMESPACE, TABLE); + // TODO Check this change + verify(adminSpy).getImportTableMetadata(NAMESPACE, TABLE, Collections.emptyMap()); 
verify(adminSpy).createNamespacesTableIfNotExists(connection); verify(adminSpy).upsertIntoNamespacesTable(connection, NAMESPACE); verify(adminSpy) diff --git a/core/src/test/java/com/scalar/db/storage/jdbc/RdbEngineTest.java b/core/src/test/java/com/scalar/db/storage/jdbc/RdbEngineTest.java index fabbeff060..1a19fa7843 100644 --- a/core/src/test/java/com/scalar/db/storage/jdbc/RdbEngineTest.java +++ b/core/src/test/java/com/scalar/db/storage/jdbc/RdbEngineTest.java @@ -12,6 +12,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; +import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.params.ParameterizedTest; @@ -40,19 +41,47 @@ public void getDataTypeForScalarDbTest(RdbEngine rdbEngineType) { String description = String.format( "database engine specific test failed: " - + "%s, JDBCType = %s, type name = %s, column size = %d, digits = %dp", - rdbEngineType, given.type, given.typeName, given.columnSize, given.digits); + + "%s, JDBCType = %s, type name = %s, column size = %d, digits = %dp, overrideDataType = %s", + rdbEngineType, + given.type, + given.typeName, + given.columnSize, + given.digits, + given.overrideDataType); if (expected != null) { - DataType actual = - rdbEngine.getDataTypeForScalarDb( - given.type, given.typeName, given.columnSize, given.digits, ""); - assertThat(actual).as(description).isEqualTo(expected); + if (given.overrideDataType != null) { + DataType actualWithAllowedOverride = + rdbEngine.getDataTypeForScalarDb( + given.type, + given.typeName, + given.columnSize, + given.digits, + "", + given.overrideDataType); + assertThat(actualWithAllowedOverride).as(description).isEqualTo(expected); + } else { + DataType actualWithoutOverride = + rdbEngine.getDataTypeForScalarDb( + given.type, given.typeName, given.columnSize, given.digits, "", null); + assertThat(actualWithoutOverride).as(description).isEqualTo(expected); + + // Overriding with the default type mapping should work as well + DataType actualWithOverrideSameAsDefault = + rdbEngine.getDataTypeForScalarDb( + given.type, given.typeName, given.columnSize, given.digits, "", expected); + assertThat(actualWithOverrideSameAsDefault).as(description).isEqualTo(expected); + } } else { Throwable thrown = catchThrowable( () -> rdbEngine.getDataTypeForScalarDb( - given.type, given.typeName, given.columnSize, given.digits, "")); + given.type, + given.typeName, + given.columnSize, + given.digits, + "", + given.overrideDataType)); assertThat(thrown).as(description).isInstanceOf(IllegalArgumentException.class); } }); @@ -155,18 +184,64 @@ private static void prepareDataTypeMap() { DATA_TYPE_MAP.get(SQL_SERVER).put(new Column(JDBCType.DECIMAL, "decimal"), null); // DATE/TIME/TIMESTAMP - DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.DATE, "DATE"), null); - DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.TIME, "TIME"), null); - DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.TIMESTAMP, "TIMESTAMP"), null); - DATA_TYPE_MAP.get(POSTGRESQL).put(new Column(JDBCType.DATE, "date"), null); - DATA_TYPE_MAP.get(POSTGRESQL).put(new Column(JDBCType.TIME, "time"), null); - DATA_TYPE_MAP.get(POSTGRESQL).put(new Column(JDBCType.TIMESTAMP, "timestamp"), null); - DATA_TYPE_MAP.get(ORACLE).put(new Column(JDBCType.DATE, "DATE"), null); - DATA_TYPE_MAP.get(ORACLE).put(new Column(JDBCType.TIME, "TIME"), null); - DATA_TYPE_MAP.get(ORACLE).put(new Column(JDBCType.TIMESTAMP, "TIMESTAMP"), null); - DATA_TYPE_MAP.get(SQL_SERVER).put(new 
Column(JDBCType.DATE, "date"), null); - DATA_TYPE_MAP.get(SQL_SERVER).put(new Column(JDBCType.TIME, "time"), null); - DATA_TYPE_MAP.get(SQL_SERVER).put(new Column(JDBCType.TIMESTAMP, "datetime"), null); + DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.DATE, "DATE"), DataType.DATE); + DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.DATE, "YEAR"), null); + DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.TIME, "TIME"), DataType.TIME); + DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.TIMESTAMP, "DATETIME"), DataType.TIMESTAMP); + DATA_TYPE_MAP + .get(MYSQL) + .put( + new Column(JDBCType.TIMESTAMP, "DATETIME", DataType.TIMESTAMPTZ), DataType.TIMESTAMPTZ); + DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.TIMESTAMP, "TIMESTAMP"), DataType.TIMESTAMPTZ); + DATA_TYPE_MAP + .get(MYSQL) + .put(new Column(JDBCType.TIMESTAMP, "TIMESTAMP", DataType.TIMESTAMP), null); + DATA_TYPE_MAP.get(POSTGRESQL).put(new Column(JDBCType.DATE, "date"), DataType.DATE); + DATA_TYPE_MAP.get(POSTGRESQL).put(new Column(JDBCType.TIME, "time"), DataType.TIME); + DATA_TYPE_MAP.get(POSTGRESQL).put(new Column(JDBCType.TIME, "timetz"), null); + DATA_TYPE_MAP + .get(POSTGRESQL) + .put(new Column(JDBCType.TIMESTAMP, "timestamp"), DataType.TIMESTAMP); + DATA_TYPE_MAP + .get(POSTGRESQL) + .put(new Column(JDBCType.TIMESTAMP, "timestamptz"), DataType.TIMESTAMPTZ); + DATA_TYPE_MAP.get(ORACLE).put(new Column(JDBCType.TIMESTAMP, "DATE"), DataType.DATE); + DATA_TYPE_MAP + .get(ORACLE) + .put(new Column(JDBCType.TIMESTAMP, "DATE", DataType.TIME), DataType.TIME); + DATA_TYPE_MAP + .get(ORACLE) + .put(new Column(JDBCType.TIMESTAMP, "DATE", DataType.TIMESTAMP), DataType.TIMESTAMP); + DATA_TYPE_MAP.get(ORACLE).put(new Column(JDBCType.TIMESTAMP, "TIMESTAMP"), DataType.TIMESTAMP); + DATA_TYPE_MAP + .get(ORACLE) + .put(new Column(JDBCType.TIMESTAMP, "TIMESTAMP", DataType.TIME), DataType.TIME); + DATA_TYPE_MAP + .get(ORACLE) + .put(new Column(JDBCType.OTHER, "TIMESTAMP WITH TIME ZONE"), DataType.TIMESTAMPTZ); + DATA_TYPE_MAP + .get(ORACLE) + .put(new Column(JDBCType.OTHER, "TIMESTAMP(3) WITH TIME ZONE"), DataType.TIMESTAMPTZ); + DATA_TYPE_MAP + .get(ORACLE) + .put(new Column(JDBCType.OTHER, "TIMESTAMP WITH LOCAL TIME ZONE"), DataType.TIMESTAMPTZ); + DATA_TYPE_MAP + .get(ORACLE) + .put(new Column(JDBCType.OTHER, "TIMESTAMP(1) WITH LOCAL TIME ZONE"), DataType.TIMESTAMPTZ); + DATA_TYPE_MAP.get(SQL_SERVER).put(new Column(JDBCType.DATE, "date"), DataType.DATE); + DATA_TYPE_MAP.get(SQL_SERVER).put(new Column(JDBCType.TIME, "time"), DataType.TIME); + DATA_TYPE_MAP + .get(SQL_SERVER) + .put(new Column(JDBCType.TIMESTAMP, "datetime"), DataType.TIMESTAMP); + DATA_TYPE_MAP + .get(SQL_SERVER) + .put(new Column(JDBCType.TIMESTAMP, "datetime2"), DataType.TIMESTAMP); + DATA_TYPE_MAP + .get(SQL_SERVER) + .put(new Column(JDBCType.TIMESTAMP, "smalldatetime"), DataType.TIMESTAMP); + DATA_TYPE_MAP + .get(SQL_SERVER) + .put(new Column(JDBCType.OTHER, "datetimeoffset"), DataType.TIMESTAMPTZ); // Other unsupported data types DATA_TYPE_MAP.get(MYSQL).put(new Column(JDBCType.BIT, "BIT", 8, 0), null); @@ -186,16 +261,26 @@ private static class Column { final String typeName; final int columnSize; final int digits; + @Nullable final DataType overrideDataType; Column(JDBCType type, String typeName) { - this(type, typeName, 0, 0); + this(type, typeName, 0, 0, null); + } + + Column(JDBCType type, String typeName, DataType overrideDataType) { + this(type, typeName, 0, 0, overrideDataType); } Column(JDBCType type, String typeName, int columnSize, int digits) { + this(type, 
typeName, columnSize, digits, null); + } + + Column(JDBCType type, String typeName, int columnSize, int digits, DataType overrideDataType) { this.type = type; this.typeName = typeName; this.columnSize = columnSize; this.digits = digits; + this.overrideDataType = overrideDataType; } @Override @@ -215,7 +300,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(type, typeName, columnSize, digits); + return Objects.hash(type, typeName, columnSize, digits, overrideDataType); } @Override diff --git a/core/src/test/java/com/scalar/db/storage/multistorage/MultiStorageAdminTest.java b/core/src/test/java/com/scalar/db/storage/multistorage/MultiStorageAdminTest.java index 6c2b1ffab9..bceea4defc 100644 --- a/core/src/test/java/com/scalar/db/storage/multistorage/MultiStorageAdminTest.java +++ b/core/src/test/java/com/scalar/db/storage/multistorage/MultiStorageAdminTest.java @@ -617,4 +617,34 @@ public void upgrade_ShouldCallNamespaceAndDefaultAdmins() throws ExecutionExcept verify(admin1).upgrade(options); verify(admin2).upgrade(options); } + + @Test + public void importTable_ForTable1InNamespace1_ShouldCallAdmin1() throws ExecutionException { + // Arrange + String namespace = NAMESPACE1; + String table = TABLE1; + Map options = ImmutableMap.of("a", "b"); + Map overrideColumnsType = ImmutableMap.of("c", DataType.TIMESTAMPTZ); + + // Act + multiStorageAdmin.importTable(namespace, table, options, overrideColumnsType); + + // Assert + verify(admin1).importTable(namespace, table, options, overrideColumnsType); + } + + @Test + public void getImportTableMetadata_ForTable1InNamespace1_ShouldCallAdmin1() + throws ExecutionException { + // Arrange + String namespace = NAMESPACE1; + String table = TABLE1; + Map overrideColumnsType = ImmutableMap.of("c", DataType.TIMESTAMPTZ); + + // Act + multiStorageAdmin.getImportTableMetadata(namespace, table, overrideColumnsType); + + // Assert + verify(admin1).getImportTableMetadata(namespace, table, overrideColumnsType); + } } diff --git a/core/src/test/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdminTestBase.java b/core/src/test/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdminTestBase.java index 7b6c29c6ad..696ea3c5d8 100644 --- a/core/src/test/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdminTestBase.java +++ b/core/src/test/java/com/scalar/db/transaction/consensuscommit/ConsensusCommitAdminTestBase.java @@ -606,6 +606,7 @@ public void addNewColumnToTable_WithEncrypted_ShouldThrowUnsupportedOperationExc public void importTable_ShouldCallStorageAdminProperly() throws ExecutionException { // Arrange Map options = ImmutableMap.of("foo", "bar"); + Map overrideColumnsType = ImmutableMap.of("col", DataType.TEXT); String primaryKeyColumn = "pk"; String column = "col"; TableMetadata metadata = @@ -615,17 +616,18 @@ public void importTable_ShouldCallStorageAdminProperly() throws ExecutionExcepti .addPartitionKey(primaryKeyColumn) .build(); when(distributedStorageAdmin.getTableMetadata(NAMESPACE, TABLE)).thenReturn(null); - when(distributedStorageAdmin.getImportTableMetadata(NAMESPACE, TABLE)).thenReturn(metadata); + when(distributedStorageAdmin.getImportTableMetadata(NAMESPACE, TABLE, overrideColumnsType)) + .thenReturn(metadata); doNothing() .when(distributedStorageAdmin) .addRawColumnToTable(anyString(), anyString(), anyString(), any(DataType.class)); // Act - admin.importTable(NAMESPACE, TABLE, options); + admin.importTable(NAMESPACE, TABLE, options, overrideColumnsType); // 
Assert verify(distributedStorageAdmin).getTableMetadata(NAMESPACE, TABLE); - verify(distributedStorageAdmin).getImportTableMetadata(NAMESPACE, TABLE); + verify(distributedStorageAdmin).getImportTableMetadata(NAMESPACE, TABLE, overrideColumnsType); for (Entry entry : ConsensusCommitUtils.getTransactionMetaColumns().entrySet()) { verify(distributedStorageAdmin) diff --git a/core/src/test/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdminTest.java b/core/src/test/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdminTest.java index 02bbca7226..8e0f9f077c 100644 --- a/core/src/test/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdminTest.java +++ b/core/src/test/java/com/scalar/db/transaction/jdbc/JdbcTransactionAdminTest.java @@ -235,10 +235,10 @@ public void importTable_ShouldCallJdbcAdminProperly() throws ExecutionException String table = "tbl"; // Act - admin.importTable(namespace, table, Collections.emptyMap()); + admin.importTable(namespace, table, Collections.emptyMap(), Collections.emptyMap()); // Assert - verify(jdbcAdmin).importTable(namespace, table, Collections.emptyMap()); + verify(jdbcAdmin).importTable(namespace, table, Collections.emptyMap(), Collections.emptyMap()); } @Test diff --git a/core/src/test/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdminTest.java b/core/src/test/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdminTest.java index 1ed54f438f..b108f5d801 100644 --- a/core/src/test/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdminTest.java +++ b/core/src/test/java/com/scalar/db/transaction/singlecrudoperation/SingleCrudOperationTransactionAdminTest.java @@ -238,12 +238,14 @@ public void importTable_ShouldCallDistributedStorageAdminProperly() throws Execu // Arrange String namespace = "ns"; String table = "tbl"; + Map options = ImmutableMap.of("a", "b"); + Map overrideColumnsType = ImmutableMap.of("c", DataType.TIMESTAMPTZ); // Act - admin.importTable(namespace, table, Collections.emptyMap()); + admin.importTable(namespace, table, options, overrideColumnsType); // Assert - verify(distributedStorageAdmin).importTable(namespace, table, Collections.emptyMap()); + verify(distributedStorageAdmin).importTable(namespace, table, options, overrideColumnsType); } @Test diff --git a/integration-test/src/main/java/com/scalar/db/api/DistributedStorageAdminImportTableIntegrationTestBase.java b/integration-test/src/main/java/com/scalar/db/api/DistributedStorageAdminImportTableIntegrationTestBase.java index dae2a4b559..8303d65c8a 100644 --- a/integration-test/src/main/java/com/scalar/db/api/DistributedStorageAdminImportTableIntegrationTestBase.java +++ b/integration-test/src/main/java/com/scalar/db/api/DistributedStorageAdminImportTableIntegrationTestBase.java @@ -4,12 +4,19 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.scalar.db.exception.storage.ExecutionException; +import com.scalar.db.io.Column; +import com.scalar.db.io.DataType; import com.scalar.db.service.StorageFactory; +import java.sql.SQLException; +import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; -import java.util.Map.Entry; +import java.util.Optional; import java.util.Properties; +import java.util.Set; +import java.util.stream.Collectors; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -26,9 
+33,9 @@ public abstract class DistributedStorageAdminImportTableIntegrationTestBase { private static final String TEST_NAME = "storage_admin_import_table"; private static final String NAMESPACE = "int_test_" + TEST_NAME; - private final Map tables = new HashMap<>(); - + private final List testDataList = new ArrayList<>(); protected DistributedStorageAdmin admin; + protected DistributedStorage storage; @BeforeAll public void beforeAll() throws Exception { @@ -48,13 +55,11 @@ protected Map getCreationOptions() { } private void dropTable() throws Exception { - for (Entry entry : tables.entrySet()) { - String table = entry.getKey(); - TableMetadata metadata = entry.getValue(); - if (metadata == null) { - dropNonImportableTable(table); + for (TestData testData : testDataList) { + if (!testData.isSupportedTable()) { + dropNonImportableTable(testData.getTableName()); } else { - admin.dropTable(getNamespace(), table); + admin.dropTable(getNamespace(), testData.getTableName()); } } if (!admin.namespaceExists(getNamespace())) { @@ -68,6 +73,7 @@ private void dropTable() throws Exception { protected void setUp() throws Exception { StorageFactory factory = StorageFactory.create(getProperties(TEST_NAME)); admin = factory.getStorageAdmin(); + storage = factory.getStorage(); } @AfterEach @@ -90,27 +96,29 @@ protected void afterEach() { @AfterAll protected void afterAll() throws Exception {} - protected abstract Map createExistingDatabaseWithAllDataTypes() - throws Exception; + protected abstract List createExistingDatabaseWithAllDataTypes() throws SQLException; protected abstract void dropNonImportableTable(String table) throws Exception; @Test public void importTable_ShouldWorkProperly() throws Exception { // Arrange - tables.putAll(createExistingDatabaseWithAllDataTypes()); + testDataList.addAll(createExistingDatabaseWithAllDataTypes()); // Act Assert - for (Entry entry : tables.entrySet()) { - String table = entry.getKey(); - TableMetadata metadata = entry.getValue(); - if (metadata == null) { - importTable_ForNonImportableTable_ShouldThrowIllegalArgumentException(table); + for (TestData testData : testDataList) { + if (!testData.isSupportedTable()) { + importTable_ForNonImportableTable_ShouldThrowIllegalArgumentException( + testData.getTableName()); } else { - importTable_ForImportableTable_ShouldImportProperly(table, metadata); + importTable_ForImportableTable_ShouldImportProperly( + testData.getTableName(), + testData.getOverrideColumnsType(), + testData.getTableMetadata()); } } importTable_ForNonExistingTable_ShouldThrowIllegalArgumentException(); + importTable_ForImportedTable_ShouldPutThenGetCorrectly(); } @Test @@ -123,9 +131,10 @@ public void importTable_ForUnsupportedDatabase_ShouldThrowUnsupportedOperationEx } private void importTable_ForImportableTable_ShouldImportProperly( - String table, TableMetadata metadata) throws ExecutionException { + String table, Map overrideColumnsType, TableMetadata metadata) + throws ExecutionException { // Act - admin.importTable(getNamespace(), table, Collections.emptyMap()); + admin.importTable(getNamespace(), table, Collections.emptyMap(), overrideColumnsType); // Assert assertThat(admin.namespaceExists(getNamespace())).isTrue(); @@ -147,4 +156,70 @@ private void importTable_ForNonExistingTable_ShouldThrowIllegalArgumentException () -> admin.importTable(getNamespace(), "non-existing-table", Collections.emptyMap())) .isInstanceOf(IllegalArgumentException.class); } + + public void importTable_ForImportedTable_ShouldPutThenGetCorrectly() throws 
ExecutionException { + // Arrange + List puts = + testDataList.stream() + .filter(TestData::isSupportedTable) + .map(td -> td.getPut(getNamespace(), td.getTableName())) + .collect(Collectors.toList()); + List gets = + testDataList.stream() + .filter(TestData::isSupportedTable) + .map(td -> td.getGet(getNamespace(), td.getTableName())) + .collect(Collectors.toList()); + + // Act + for (Put put : puts) { + storage.put(put); + } + List> results = new ArrayList<>(); + for (Get get : gets) { + results.add(storage.get(get)); + } + + // Assert + for (int i = 0; i < results.size(); i++) { + Put put = puts.get(i); + Optional optResult = results.get(i); + + assertThat(optResult).isPresent(); + Result result = optResult.get(); + Set actualColumnNamesWithoutKeys = new HashSet<>(result.getContainedColumnNames()); + actualColumnNamesWithoutKeys.removeAll( + put.getPartitionKey().getColumns().stream() + .map(Column::getName) + .collect(Collectors.toSet())); + + assertThat(actualColumnNamesWithoutKeys) + .containsExactlyInAnyOrderElementsOf(put.getContainedColumnNames()); + result.getColumns().entrySet().stream() + .filter( + e -> { + // Filter partition key columns + return !put.getPartitionKey().getColumns().contains(e.getValue()); + }) + .forEach( + entry -> + // Assert each result column is equal to the column inserted with the put + assertThat(entry.getValue()).isEqualTo(put.getColumns().get(entry.getKey()))); + } + } + + public interface TestData { + boolean isSupportedTable(); + + String getTableName(); + + Map getOverrideColumnsType(); + + TableMetadata getTableMetadata(); + + Insert getInsert(String namespace, String table); + + Put getPut(String namespace, String table); + + Get getGet(String namespace, String table); + } } diff --git a/integration-test/src/main/java/com/scalar/db/api/DistributedTransactionAdminImportTableIntegrationTestBase.java b/integration-test/src/main/java/com/scalar/db/api/DistributedTransactionAdminImportTableIntegrationTestBase.java index 49f3bb28b9..5acdcedcef 100644 --- a/integration-test/src/main/java/com/scalar/db/api/DistributedTransactionAdminImportTableIntegrationTestBase.java +++ b/integration-test/src/main/java/com/scalar/db/api/DistributedTransactionAdminImportTableIntegrationTestBase.java @@ -3,13 +3,21 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import com.scalar.db.api.DistributedStorageAdminImportTableIntegrationTestBase.TestData; import com.scalar.db.exception.storage.ExecutionException; +import com.scalar.db.exception.transaction.TransactionException; +import com.scalar.db.io.Column; +import com.scalar.db.io.DataType; import com.scalar.db.service.TransactionFactory; +import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; -import java.util.Map.Entry; +import java.util.Optional; import java.util.Properties; +import java.util.Set; +import java.util.stream.Collectors; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -26,9 +34,10 @@ public abstract class DistributedTransactionAdminImportTableIntegrationTestBase private static final String TEST_NAME = "tx_admin_import_table"; private static final String NAMESPACE = "int_test_" + TEST_NAME; - private final Map tables = new HashMap<>(); + private final List testDataList = new ArrayList<>(); protected DistributedTransactionAdmin admin; + protected 
DistributedTransactionManager manager; @BeforeAll public void beforeAll() throws Exception { @@ -48,13 +57,11 @@ protected Map getCreationOptions() { } private void dropTable() throws Exception { - for (Entry entry : tables.entrySet()) { - String table = entry.getKey(); - TableMetadata metadata = entry.getValue(); - if (metadata == null) { - dropNonImportableTable(table); + for (TestData testData : testDataList) { + if (!testData.isSupportedTable()) { + dropNonImportableTable(testData.getTableName()); } else { - admin.dropTable(getNamespace(), table); + admin.dropTable(getNamespace(), testData.getTableName()); } } if (!admin.namespaceExists(getNamespace())) { @@ -62,12 +69,15 @@ private void dropTable() throws Exception { admin.repairNamespace(getNamespace(), getCreationOptions()); } admin.dropNamespace(getNamespace()); + admin.dropCoordinatorTables(); } @BeforeEach protected void setUp() throws Exception { TransactionFactory factory = TransactionFactory.create(getProperties(TEST_NAME)); admin = factory.getTransactionAdmin(); + manager = factory.getTransactionManager(); + admin.createCoordinatorTables(true); } @AfterEach @@ -90,27 +100,29 @@ protected void afterEach() { @AfterAll protected void afterAll() throws Exception {} - protected abstract Map createExistingDatabaseWithAllDataTypes() - throws Exception; + protected abstract List createExistingDatabaseWithAllDataTypes() throws Exception; protected abstract void dropNonImportableTable(String table) throws Exception; @Test public void importTable_ShouldWorkProperly() throws Exception { // Arrange - tables.putAll(createExistingDatabaseWithAllDataTypes()); + testDataList.addAll(createExistingDatabaseWithAllDataTypes()); // Act Assert - for (Entry entry : tables.entrySet()) { - String table = entry.getKey(); - TableMetadata metadata = entry.getValue(); - if (metadata == null) { - importTable_ForNonImportableTable_ShouldThrowIllegalArgumentException(table); + for (TestData testData : testDataList) { + if (!testData.isSupportedTable()) { + importTable_ForNonImportableTable_ShouldThrowIllegalArgumentException( + testData.getTableName()); } else { - importTable_ForImportableTable_ShouldImportProperly(table, metadata); + importTable_ForImportableTable_ShouldImportProperly( + testData.getTableName(), + testData.getOverrideColumnsType(), + testData.getTableMetadata()); } } importTable_ForNonExistingTable_ShouldThrowIllegalArgumentException(); + importTable_ForImportedTable_ShouldInsertThenGetCorrectly(); } @Test @@ -123,9 +135,10 @@ public void importTable_ForUnsupportedDatabase_ShouldThrowUnsupportedOperationEx } private void importTable_ForImportableTable_ShouldImportProperly( - String table, TableMetadata metadata) throws ExecutionException { + String table, Map overrideColumnsType, TableMetadata metadata) + throws ExecutionException { // Act - admin.importTable(getNamespace(), table, Collections.emptyMap()); + admin.importTable(getNamespace(), table, Collections.emptyMap(), overrideColumnsType); // Assert assertThat(admin.namespaceExists(getNamespace())).isTrue(); @@ -145,4 +158,60 @@ private void importTable_ForNonExistingTable_ShouldThrowIllegalArgumentException () -> admin.importTable(getNamespace(), "non-existing-table", Collections.emptyMap())) .isInstanceOf(IllegalArgumentException.class); } + + public void importTable_ForImportedTable_ShouldInsertThenGetCorrectly() + throws TransactionException { + // Arrange + List inserts = + testDataList.stream() + .filter(TestData::isSupportedTable) + .map(td -> td.getInsert(getNamespace(), 
td.getTableName())) + .collect(Collectors.toList()); + List gets = + testDataList.stream() + .filter(TestData::isSupportedTable) + .map(td -> td.getGet(getNamespace(), td.getTableName())) + .collect(Collectors.toList()); + + // Act + DistributedTransaction tx = manager.start(); + for (Insert insert : inserts) { + tx.insert(insert); + } + tx.commit(); + + List> results = new ArrayList<>(); + tx = manager.start(); + for (Get get : gets) { + results.add(tx.get(get)); + } + tx.commit(); + + // Assert + for (int i = 0; i < results.size(); i++) { + Insert insert = inserts.get(i); + Optional optResult = results.get(i); + + assertThat(optResult).isPresent(); + Result result = optResult.get(); + Set actualColumnNamesWithoutKeys = new HashSet<>(result.getContainedColumnNames()); + actualColumnNamesWithoutKeys.removeAll( + insert.getPartitionKey().getColumns().stream() + .map(Column::getName) + .collect(Collectors.toSet())); + + assertThat(actualColumnNamesWithoutKeys) + .containsExactlyInAnyOrderElementsOf(insert.getColumns().keySet()); + result.getColumns().entrySet().stream() + .filter( + e -> { + // Filter partition key columns + return !insert.getPartitionKey().getColumns().contains(e.getValue()); + }) + .forEach( + entry -> + // Assert each result column is equal to the column set in the insert + assertThat(entry.getValue()).isEqualTo(insert.getColumns().get(entry.getKey()))); + } + } } diff --git a/integration-test/src/main/java/com/scalar/db/schemaloader/SchemaLoaderImportIntegrationTestBase.java b/integration-test/src/main/java/com/scalar/db/schemaloader/SchemaLoaderImportIntegrationTestBase.java index a894e43f48..1612bc20de 100644 --- a/integration-test/src/main/java/com/scalar/db/schemaloader/SchemaLoaderImportIntegrationTestBase.java +++ b/integration-test/src/main/java/com/scalar/db/schemaloader/SchemaLoaderImportIntegrationTestBase.java @@ -7,6 +7,8 @@ import com.google.gson.Gson; import com.scalar.db.api.DistributedStorageAdmin; import com.scalar.db.api.DistributedTransactionAdmin; +import com.scalar.db.api.TableMetadata; +import com.scalar.db.io.DataType; import com.scalar.db.service.StorageFactory; import com.scalar.db.service.TransactionFactory; import java.io.IOException; @@ -85,7 +87,10 @@ protected String getNamespace2() { protected Map getImportSchemaJsonMap() { return ImmutableMap.of( namespace1 + "." + TABLE_1, - ImmutableMap.builder().put("transaction", true).build(), + ImmutableMap.builder() + .put("transaction", true) + .put("override-columns-type", getImportableTableOverrideColumnsType()) + .build(), namespace2 + "." + TABLE_2, ImmutableMap.builder().put("transaction", false).build()); } @@ -149,6 +154,10 @@ private void dropTablesIfExist() throws Exception { protected abstract void dropNonImportableTable(String namespace, String table) throws Exception; + protected abstract Map getImportableTableOverrideColumnsType(); + + protected abstract TableMetadata getImportableTableMetadata(boolean hasTypeOverride); + protected void waitForDifferentSessionDdl() { // No wait by default. } @@ -163,9 +172,11 @@ public void importTables_ImportableTablesGiven_ShouldImportProperly() throws Exc storageAdmin.createNamespace(namespace2); waitForDifferentSessionDdl(); + // TABLE_1 sets options to override column types. createImportableTable(namespace1, TABLE_1); waitForDifferentSessionDdl(); + // TABLE_2 does not set options to override column types. 
createImportableTable(namespace2, TABLE_2); // Act @@ -177,6 +188,10 @@ public void importTables_ImportableTablesGiven_ShouldImportProperly() throws Exc assertThat(exitCode).isEqualTo(0); assertThat(transactionAdmin.tableExists(namespace1, TABLE_1)).isTrue(); assertThat(storageAdmin.tableExists(namespace2, TABLE_2)).isTrue(); + assertThat(transactionAdmin.getTableMetadata(namespace1, TABLE_1)) + .isEqualTo(getImportableTableMetadata(true)); + assertThat(storageAdmin.getTableMetadata(namespace2, TABLE_2)) + .isEqualTo(getImportableTableMetadata(false)); assertThat(transactionAdmin.coordinatorTablesExist()).isFalse(); } diff --git a/schema-loader/sample/import_schema_sample.json b/schema-loader/sample/import_schema_sample.json index 3078dea3a2..b1431569ff 100644 --- a/schema-loader/sample/import_schema_sample.json +++ b/schema-loader/sample/import_schema_sample.json @@ -1,6 +1,10 @@ { "sample_namespace1.sample_table1": { - "transaction": true + "transaction": true, + "override-columns-type": { + "c3": "TIME", + "c5": "TIMESTAMP" + } }, "sample_namespace1.sample_table2": { "transaction": true diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java index 94ee36b424..5c114de096 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/ImportTableSchema.java @@ -2,18 +2,23 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.scalar.db.common.error.CoreError; +import com.scalar.db.io.DataType; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import javax.annotation.concurrent.Immutable; @Immutable public class ImportTableSchema { + private static final String OVERRIDE_COLUMNS_TYPE = "override-columns-type"; private final String namespace; private final String tableName; private final boolean isTransactionTable; private final ImmutableMap options; + private final ImmutableMap overrideColumnsType; public ImportTableSchema( String tableFullName, JsonObject tableDefinition, Map options) { @@ -30,9 +35,31 @@ public ImportTableSchema( } else { isTransactionTable = true; } + this.overrideColumnsType = parseOverrideColumnsType(tableFullName, tableDefinition); this.options = buildOptions(tableDefinition, options); } + private ImmutableMap parseOverrideColumnsType( + String tableFullName, JsonObject tableDefinition) { + if (!tableDefinition.has(OVERRIDE_COLUMNS_TYPE)) { + return ImmutableMap.of(); + } + JsonObject columns = tableDefinition.getAsJsonObject(OVERRIDE_COLUMNS_TYPE); + ImmutableMap.Builder columnsBuilder = ImmutableMap.builder(); + for (Entry column : columns.entrySet()) { + String columnName = column.getKey(); + String columnDataType = column.getValue().getAsString().trim(); + DataType dataType = TableSchema.DATA_MAP_TYPE.get(columnDataType.toUpperCase()); + if (dataType == null) { + throw new IllegalArgumentException( + CoreError.SCHEMA_LOADER_PARSE_ERROR_INVALID_COLUMN_TYPE.buildMessage( + tableFullName, columnName, column.getValue().getAsString())); + } + columnsBuilder.put(columnName, dataType); + } + return columnsBuilder.buildKeepingLast(); + } + // For the SpotBugs warning CT_CONSTRUCTOR_THROW @Override protected final void finalize() {} @@ -47,7 +74,8 @@ private ImmutableMap buildOptions( TableSchema.CLUSTERING_KEY, 
TableSchema.TRANSACTION, TableSchema.COLUMNS, - TableSchema.SECONDARY_INDEX); + TableSchema.SECONDARY_INDEX, + OVERRIDE_COLUMNS_TYPE); tableDefinition.entrySet().stream() .filter(entry -> !keysToIgnore.contains(entry.getKey())) .forEach(entry -> optionsBuilder.put(entry.getKey(), entry.getValue().getAsString())); @@ -70,4 +98,8 @@ public boolean isTransactionTable() { public Map getOptions() { return options; } + + public Map getOverrideColumnsType() { + return overrideColumnsType; + } } diff --git a/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java index e08fac0aca..6eca75ef81 100644 --- a/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java +++ b/schema-loader/src/main/java/com/scalar/db/schemaloader/SchemaOperator.java @@ -408,9 +408,13 @@ public void importTables(List tableSchemaList, Map DATA_MAP_TYPE = + static final ImmutableMap DATA_MAP_TYPE = ImmutableMap.builder() .put("BOOLEAN", DataType.BOOLEAN) .put("INT", DataType.INT) diff --git a/schema-loader/src/test/java/com/scalar/db/schemaloader/ImportTableSchemaTest.java b/schema-loader/src/test/java/com/scalar/db/schemaloader/ImportTableSchemaTest.java index 84265d52dd..65515f40c7 100644 --- a/schema-loader/src/test/java/com/scalar/db/schemaloader/ImportTableSchemaTest.java +++ b/schema-loader/src/test/java/com/scalar/db/schemaloader/ImportTableSchemaTest.java @@ -1,10 +1,12 @@ package com.scalar.db.schemaloader; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.entry; import com.google.common.collect.ImmutableMap; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import com.scalar.db.io.DataType; import java.util.Collections; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; @@ -22,8 +24,7 @@ public void setUp() throws Exception { } @Test - public void constructor_DefinitionWithTransactionTrueGiven_ShouldConstructProperTableSchema() - throws SchemaLoaderException { + public void constructor_DefinitionWithTransactionTrueGiven_ShouldConstructProperTableSchema() { String tableDefinitionJson = "{\"transaction\": true}"; JsonObject tableDefinition = JsonParser.parseString(tableDefinitionJson).getAsJsonObject(); @@ -32,15 +33,15 @@ public void constructor_DefinitionWithTransactionTrueGiven_ShouldConstructProper new ImportTableSchema("ns.tbl", tableDefinition, Collections.emptyMap()); // Assert - Assertions.assertThat(tableSchema.getNamespace()).isEqualTo("ns"); - Assertions.assertThat(tableSchema.getTable()).isEqualTo("tbl"); - Assertions.assertThat(tableSchema.isTransactionTable()).isEqualTo(true); - Assertions.assertThat(tableSchema.getOptions()).isEmpty(); + assertThat(tableSchema.getNamespace()).isEqualTo("ns"); + assertThat(tableSchema.getTable()).isEqualTo("tbl"); + assertThat(tableSchema.isTransactionTable()).isEqualTo(true); + assertThat(tableSchema.getOptions()).isEmpty(); + assertThat(tableSchema.getOverrideColumnsType()).isEmpty(); } @Test - public void constructor_DefinitionWithTransactionFalseGiven_ShouldConstructProperTableSchema() - throws SchemaLoaderException { + public void constructor_DefinitionWithTransactionFalseGiven_ShouldConstructProperTableSchema() { String tableDefinitionJson = "{\"transaction\": false}"; JsonObject tableDefinition = JsonParser.parseString(tableDefinitionJson).getAsJsonObject(); @@ -49,10 +50,37 @@ public void 
constructor_DefinitionWithTransactionFalseGiven_ShouldConstructPrope new ImportTableSchema("ns.tbl", tableDefinition, Collections.emptyMap()); // Assert - Assertions.assertThat(tableSchema.getNamespace()).isEqualTo("ns"); - Assertions.assertThat(tableSchema.getTable()).isEqualTo("tbl"); - Assertions.assertThat(tableSchema.isTransactionTable()).isEqualTo(false); - Assertions.assertThat(tableSchema.getOptions()).isEmpty(); + assertThat(tableSchema.getNamespace()).isEqualTo("ns"); + assertThat(tableSchema.getTable()).isEqualTo("tbl"); + assertThat(tableSchema.isTransactionTable()).isEqualTo(false); + assertThat(tableSchema.getOptions()).isEmpty(); + assertThat(tableSchema.getOverrideColumnsType()).isEmpty(); + } + + @Test + public void + constructor_DefinitionWithOverrideColumnsTypeGiven_ShouldConstructProperTableSchema() { + String tableDefinitionJson = + "{" + + " \"transaction\": true," + + " \"override-columns-type\": {" + + " \"c3\": \"TIME\"," + + " \"c5\": \"TIMESTAMP\"" + + " }" + + " }"; + JsonObject tableDefinition = JsonParser.parseString(tableDefinitionJson).getAsJsonObject(); + + // Act + ImportTableSchema tableSchema = + new ImportTableSchema("ns.tbl", tableDefinition, Collections.emptyMap()); + + // Assert + assertThat(tableSchema.getNamespace()).isEqualTo("ns"); + assertThat(tableSchema.getTable()).isEqualTo("tbl"); + assertThat(tableSchema.isTransactionTable()).isEqualTo(true); + assertThat(tableSchema.getOptions()).isEmpty(); + assertThat(tableSchema.getOverrideColumnsType()) + .containsOnly(entry("c3", DataType.TIME), entry("c5", DataType.TIMESTAMP)); } @Test @@ -67,8 +95,7 @@ public void constructor_WrongFormatTableFullNameGiven_ShouldThrowIllegalArgument } @Test - public void constructor_DefinitionWithoutTransactionGiven_ShouldConstructProperTableSchema() - throws SchemaLoaderException { + public void constructor_DefinitionWithoutTransactionGiven_ShouldConstructProperTableSchema() { String tableDefinitionJson = "{}"; JsonObject tableDefinition = JsonParser.parseString(tableDefinitionJson).getAsJsonObject(); @@ -77,17 +104,17 @@ public void constructor_DefinitionWithoutTransactionGiven_ShouldConstructProperT new ImportTableSchema("ns.tbl", tableDefinition, Collections.emptyMap()); // Assert - Assertions.assertThat(tableSchema.getNamespace()).isEqualTo("ns"); - Assertions.assertThat(tableSchema.getTable()).isEqualTo("tbl"); - Assertions.assertThat(tableSchema.isTransactionTable()).isEqualTo(true); - Assertions.assertThat(tableSchema.getOptions()).isEmpty(); + assertThat(tableSchema.getNamespace()).isEqualTo("ns"); + assertThat(tableSchema.getTable()).isEqualTo("tbl"); + assertThat(tableSchema.isTransactionTable()).isEqualTo(true); + assertThat(tableSchema.getOptions()).isEmpty(); + assertThat(tableSchema.getOverrideColumnsType()).isEmpty(); } @Test - public void constructor_DefinitionWithGlobalAndSchemaOptions_ShouldConstructWithProperOptions() - throws SchemaLoaderException { + public void constructor_DefinitionWithGlobalAndSchemaOptions_ShouldConstructWithProperOptions() { String tableDefinitionJson = - "{\"partition-key\": \"ignored\", \"columns\": \"ignored\", \"clustering-key\": \"ignored\", \"secondary-index\": \"ignored\",\"transaction\": false, \"opt1\": \"schema-opt1\", \"opt3\": \"schema-opt3\"}"; + "{\"partition-key\": \"ignored\", \"columns\": \"ignored\", \"clustering-key\": \"ignored\", \"secondary-index\": \"ignored\",\"transaction\": false, \"opt1\": \"schema-opt1\", \"opt3\": \"schema-opt3\", \"override-columns-type\": {\"c1\": \"DOUBLE\"}}"; JsonObject 
tableDefinition = JsonParser.parseString(tableDefinitionJson).getAsJsonObject(); // Act @@ -98,13 +125,14 @@ public void constructor_DefinitionWithGlobalAndSchemaOptions_ShouldConstructWith ImmutableMap.of("opt1", "global-opt1", "opt2", "global-opt2")); // Assert - Assertions.assertThat(tableSchema.getNamespace()).isEqualTo("ns"); - Assertions.assertThat(tableSchema.getTable()).isEqualTo("tbl"); - Assertions.assertThat(tableSchema.isTransactionTable()).isEqualTo(false); - Assertions.assertThat(tableSchema.getOptions()) + assertThat(tableSchema.getNamespace()).isEqualTo("ns"); + assertThat(tableSchema.getTable()).isEqualTo("tbl"); + assertThat(tableSchema.isTransactionTable()).isEqualTo(false); + assertThat(tableSchema.getOptions()) .containsOnly( entry("opt1", "schema-opt1"), entry("opt2", "global-opt2"), entry("opt3", "schema-opt3")); + assertThat(tableSchema.getOverrideColumnsType()).containsOnly(entry("c1", DataType.DOUBLE)); } } diff --git a/schema-loader/src/test/java/com/scalar/db/schemaloader/SchemaOperatorTest.java b/schema-loader/src/test/java/com/scalar/db/schemaloader/SchemaOperatorTest.java index c0a5039c85..9bc58c13fa 100644 --- a/schema-loader/src/test/java/com/scalar/db/schemaloader/SchemaOperatorTest.java +++ b/schema-loader/src/test/java/com/scalar/db/schemaloader/SchemaOperatorTest.java @@ -512,12 +512,14 @@ public void importTables_WithTransactionTables_ShouldCallProperMethods() throws when(importTableSchema.getNamespace()).thenReturn("ns"); when(importTableSchema.isTransactionTable()).thenReturn(true); when(importTableSchema.getTable()).thenReturn("tb"); + Map overrideColumnsType = ImmutableMap.of("c1", DataType.INT); + when(importTableSchema.getOverrideColumnsType()).thenReturn(overrideColumnsType); // Act operator.importTables(tableSchemaList, options); // Assert - verify(transactionAdmin, times(3)).importTable("ns", "tb", options); + verify(transactionAdmin, times(3)).importTable("ns", "tb", options, overrideColumnsType); verifyNoInteractions(storageAdmin); } @@ -529,12 +531,14 @@ public void importTables_WithoutTransactionTables_ShouldCallProperMethods() thro when(importTableSchema.getNamespace()).thenReturn("ns"); when(importTableSchema.isTransactionTable()).thenReturn(false); when(importTableSchema.getTable()).thenReturn("tb"); + Map overrideColumnsType = ImmutableMap.of("c1", DataType.INT); + when(importTableSchema.getOverrideColumnsType()).thenReturn(overrideColumnsType); // Act operator.importTables(tableSchemaList, options); // Assert - verify(storageAdmin, times(3)).importTable("ns", "tb", options); + verify(storageAdmin, times(3)).importTable("ns", "tb", options, overrideColumnsType); verifyNoInteractions(transactionAdmin); }
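Note: for reference, a minimal sketch of how the four-argument importTable API introduced in this change can be invoked directly. The namespace, table, and column names below are hypothetical (they mirror schema-loader/sample/import_schema_sample.json above); only the importTable(namespace, table, options, overrideColumnsType) signature and the DataType values come from this diff.

import com.google.common.collect.ImmutableMap;
import com.scalar.db.api.DistributedTransactionAdmin;
import com.scalar.db.io.DataType;
import com.scalar.db.service.TransactionFactory;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;

public class ImportTableWithTypeOverride {
  public static void main(String[] args) throws Exception {
    Properties properties = new Properties();
    // Load ScalarDB connection properties here (omitted).
    DistributedTransactionAdmin admin =
        TransactionFactory.create(properties).getTransactionAdmin();

    // Map selected columns of the existing table to specific ScalarDB types,
    // equivalent to the "override-columns-type" block in the sample JSON.
    Map<String, DataType> overrideColumnsType =
        ImmutableMap.of("c3", DataType.TIME, "c5", DataType.TIMESTAMP);

    // New signature: import options plus per-column type overrides.
    admin.importTable(
        "sample_namespace1", "sample_table1", Collections.emptyMap(), overrideColumnsType);

    admin.close();
  }
}

Columns not listed in overrideColumnsType keep the default mapping chosen by the engine-specific getDataTypeForScalarDb implementation, as exercised by the JdbcAdminTest verifications above.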