From e75c8d14270a5f12fcf44cde05eb151575496d15 Mon Sep 17 00:00:00 2001
From: Rahul Mahadev
Date: Thu, 1 Aug 2019 20:06:21 +0000
Subject: [PATCH] Refactoring io.delta package to io.delta.tables

Staging this PR for now. Changing the namespace `io.delta` to
`io.delta.tables`.

No new tests added; re-ran the existing tests.

Author: Rahul Mahadev

GitOrigin-RevId: ab6a5968a29f698426bea27db0f33f431da637ef
---
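Note for reviewers (kept below the tear line, so it stays out of the commit
message): the rename is user-visible, but callers keep the same entry points
and only update their import. A minimal before/after sketch; the SparkSession
`spark` and the table path are illustrative:

    // Before this patch
    import io.delta.DeltaTable
    val dt = DeltaTable.forPath(spark, "/tmp/delta-table")

    // After this patch
    import io.delta.tables.DeltaTable
    val dt = DeltaTable.forPath(spark, "/tmp/delta-table")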
 build.sbt                                     |  6 ++--
 .../{ => tables}/DeltaMergeBuilder.scala      |  2 +-
 .../io/delta/{ => tables}/DeltaTable.scala    |  4 +--
 .../execution/DeltaTableOperations.scala      |  4 +--
 .../{ => tables}/JavaDeltaTableSuite.java     |  2 +-
 .../spark/sql/delta/DeleteJavaSuite.java      |  2 +-
 .../spark/sql/delta/MergeIntoJavaSuite.java   |  2 +-
 .../spark/sql/delta/UpdateJavaSuite.java      |  2 +-
 .../delta/{ => tables}/DeltaTableSuite.scala  |  2 +-
 .../spark/sql/delta/DeleteScalaSuite.scala    | 14 ++++----
 .../spark/sql/delta/DeltaVacuumSuite.scala    |  2 +-
 .../sql/delta/DescribeDeltaHistorySuite.scala |  4 +--
 .../spark/sql/delta/MergeIntoScalaSuite.scala | 36 +++++++++----------
 .../spark/sql/delta/UpdateScalaSuite.scala    | 16 ++++-----
 14 files changed, 49 insertions(+), 49 deletions(-)
 rename src/main/scala/io/delta/{ => tables}/DeltaMergeBuilder.scala (99%)
 rename src/main/scala/io/delta/{ => tables}/DeltaTable.scala (99%)
 rename src/main/scala/io/delta/{ => tables}/execution/DeltaTableOperations.scala (98%)
 rename src/test/java/io/delta/{ => tables}/JavaDeltaTableSuite.java (98%)
 rename src/test/scala/io/delta/{ => tables}/DeltaTableSuite.scala (98%)

diff --git a/build.sbt b/build.sbt
index d484f60c999..59d385eb380 100644
--- a/build.sbt
+++ b/build.sbt
@@ -90,14 +90,14 @@ enablePlugins(GenJavadocPlugin, JavaUnidocPlugin, ScalaUnidocPlugin)
 
 // Configure Scala unidoc
 scalacOptions in(ScalaUnidoc, unidoc) ++= Seq(
-  "-skip-packages", "org:com:io.delta.execution",
+  "-skip-packages", "org:com:io.delta.tables.execution",
   "-doc-title", "Delta Lake " + version.value.replaceAll("-SNAPSHOT", "") + " ScalaDoc"
 )
 
 // Configure Java unidoc
 javacOptions in(JavaUnidoc, unidoc) := Seq(
   "-public",
-  "-exclude", "org:com:io.delta.execution",
+  "-exclude", "org:com:io.delta.tables.execution",
   "-windowtitle", "Delta Lake " + version.value.replaceAll("-SNAPSHOT", "") + " JavaDoc",
   "-noqualifier", "java.lang",
   "-tag", "return:X"
@@ -107,7 +107,7 @@ javacOptions in(JavaUnidoc, unidoc) := Seq(
 def ignoreUndocumentedPackages(packages: Seq[Seq[java.io.File]]): Seq[Seq[java.io.File]] = {
   packages
     .map(_.filterNot(_.getName.contains("$")))
-    .map(_.filterNot(_.getCanonicalPath.contains("io/delta/execution")))
+    .map(_.filterNot(_.getCanonicalPath.contains("io/delta/tables/execution")))
     .map(_.filterNot(_.getCanonicalPath.contains("spark")))
 }
diff --git a/src/main/scala/io/delta/DeltaMergeBuilder.scala b/src/main/scala/io/delta/tables/DeltaMergeBuilder.scala
similarity index 99%
rename from src/main/scala/io/delta/DeltaMergeBuilder.scala
rename to src/main/scala/io/delta/tables/DeltaMergeBuilder.scala
index 71cf62aec08..d2293f8b691 100644
--- a/src/main/scala/io/delta/DeltaMergeBuilder.scala
+++ b/src/main/scala/io/delta/tables/DeltaMergeBuilder.scala
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package io.delta
+package io.delta.tables
 
 import scala.collection.JavaConverters._
 import scala.collection.Map
diff --git a/src/main/scala/io/delta/DeltaTable.scala b/src/main/scala/io/delta/tables/DeltaTable.scala
similarity index 99%
rename from src/main/scala/io/delta/DeltaTable.scala
rename to src/main/scala/io/delta/tables/DeltaTable.scala
index 030f3aebaea..85fcb1860a3 100644
--- a/src/main/scala/io/delta/DeltaTable.scala
+++ b/src/main/scala/io/delta/tables/DeltaTable.scala
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package io.delta
+package io.delta.tables
 
 import scala.collection.JavaConverters._
 
 import org.apache.spark.sql.delta._
-import io.delta.execution._
+import io.delta.tables.execution._
 import org.apache.hadoop.fs.Path
 
 import org.apache.spark.annotation.InterfaceStability._
diff --git a/src/main/scala/io/delta/execution/DeltaTableOperations.scala b/src/main/scala/io/delta/tables/execution/DeltaTableOperations.scala
similarity index 98%
rename from src/main/scala/io/delta/execution/DeltaTableOperations.scala
rename to src/main/scala/io/delta/tables/execution/DeltaTableOperations.scala
index 2942ff28d1f..7cfa9f559ea 100644
--- a/src/main/scala/io/delta/execution/DeltaTableOperations.scala
+++ b/src/main/scala/io/delta/tables/execution/DeltaTableOperations.scala
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package io.delta.execution
+package io.delta.tables.execution
 
 import scala.collection.Map
 
@@ -22,7 +22,7 @@ import org.apache.spark.sql.delta.PreprocessTableUpdate
 import org.apache.spark.sql.delta.{DeltaErrors, DeltaFullTable, DeltaHistoryManager, DeltaLog}
 import org.apache.spark.sql.delta.commands.{DeleteCommand, VacuumCommand}
 import org.apache.spark.sql.delta.util.AnalysisHelper
-import io.delta.DeltaTable
+import io.delta.tables.DeltaTable
 
 import org.apache.spark.sql.{functions, Column, DataFrame, SparkSession}
 import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
diff --git a/src/test/java/io/delta/JavaDeltaTableSuite.java b/src/test/java/io/delta/tables/JavaDeltaTableSuite.java
similarity index 98%
rename from src/test/java/io/delta/JavaDeltaTableSuite.java
rename to src/test/java/io/delta/tables/JavaDeltaTableSuite.java
index 398b392c615..2070f059e9b 100644
--- a/src/test/java/io/delta/JavaDeltaTableSuite.java
+++ b/src/test/java/io/delta/tables/JavaDeltaTableSuite.java
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package io.delta;
+package io.delta.tables;
 
 import java.util.Arrays;
 import java.util.List;
diff --git a/src/test/java/org/apache/spark/sql/delta/DeleteJavaSuite.java b/src/test/java/org/apache/spark/sql/delta/DeleteJavaSuite.java
index b868d9eb1ed..45e0add17f3 100644
--- a/src/test/java/org/apache/spark/sql/delta/DeleteJavaSuite.java
+++ b/src/test/java/org/apache/spark/sql/delta/DeleteJavaSuite.java
@@ -22,7 +22,7 @@
 
 import scala.Tuple2;
 
-import io.delta.DeltaTable;
+import io.delta.tables.DeltaTable;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Assume;
diff --git a/src/test/java/org/apache/spark/sql/delta/MergeIntoJavaSuite.java b/src/test/java/org/apache/spark/sql/delta/MergeIntoJavaSuite.java
index 5ba29defebc..2841242812f 100644
--- a/src/test/java/org/apache/spark/sql/delta/MergeIntoJavaSuite.java
+++ b/src/test/java/org/apache/spark/sql/delta/MergeIntoJavaSuite.java
@@ -24,7 +24,7 @@
 
 import scala.Tuple2;
 
-import io.delta.DeltaTable;
+import io.delta.tables.DeltaTable;
 
 import org.apache.spark.sql.*;
 import org.apache.spark.util.Utils;
diff --git a/src/test/java/org/apache/spark/sql/delta/UpdateJavaSuite.java b/src/test/java/org/apache/spark/sql/delta/UpdateJavaSuite.java
index dec9945308a..7d455f109a3 100644
--- a/src/test/java/org/apache/spark/sql/delta/UpdateJavaSuite.java
+++ b/src/test/java/org/apache/spark/sql/delta/UpdateJavaSuite.java
@@ -20,7 +20,7 @@
 
 import scala.Tuple2;
 
-import io.delta.DeltaTable;
+import io.delta.tables.DeltaTable;
 import org.junit.*;
 
 import org.apache.spark.sql.*;
diff --git a/src/test/scala/io/delta/DeltaTableSuite.scala b/src/test/scala/io/delta/tables/DeltaTableSuite.scala
similarity index 98%
rename from src/test/scala/io/delta/DeltaTableSuite.scala
rename to src/test/scala/io/delta/tables/DeltaTableSuite.scala
index 6dbfed37234..018212cea1e 100644
--- a/src/test/scala/io/delta/DeltaTableSuite.scala
+++ b/src/test/scala/io/delta/tables/DeltaTableSuite.scala
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package io.delta
+package io.delta.tables
 
 import java.util.Locale
 
diff --git a/src/test/scala/org/apache/spark/sql/delta/DeleteScalaSuite.scala b/src/test/scala/org/apache/spark/sql/delta/DeleteScalaSuite.scala
index 78f95658903..e8a66f72197 100644
--- a/src/test/scala/org/apache/spark/sql/delta/DeleteScalaSuite.scala
+++ b/src/test/scala/org/apache/spark/sql/delta/DeleteScalaSuite.scala
@@ -16,7 +16,7 @@
 
 package org.apache.spark.sql.delta
 
-import io.delta.DeltaTable
+import io.delta.tables.DeltaTable
 
 import org.apache.spark.sql.{functions, Row}
 
@@ -26,21 +26,21 @@ class DeleteScalaSuite extends DeleteSuiteBase {
 
   test("delete usage test - without condition") {
     append(Seq((1, 10), (2, 20), (3, 30), (4, 40)).toDF("key", "value"))
-    val table = io.delta.DeltaTable.forPath(tempPath)
+    val table = io.delta.tables.DeltaTable.forPath(tempPath)
     table.delete()
     checkAnswer(readDeltaTable(tempPath), Nil)
   }
 
   test("delete usage test - with condition") {
     append(Seq((1, 10), (2, 20), (3, 30), (4, 40)).toDF("key", "value"))
-    val table = io.delta.DeltaTable.forPath(tempPath)
+    val table = io.delta.tables.DeltaTable.forPath(tempPath)
     table.delete("key = 1 or key = 2")
     checkAnswer(readDeltaTable(tempPath), Row(3, 30) :: Row(4, 40) :: Nil)
   }
 
   test("delete usage test - with Column condition") {
     append(Seq((1, 10), (2, 20), (3, 30), (4, 40)).toDF("key", "value"))
-    val table = io.delta.DeltaTable.forPath(tempPath)
+    val table = io.delta.tables.DeltaTable.forPath(tempPath)
     table.delete(functions.expr("key = 1 or key = 2"))
     checkAnswer(readDeltaTable(tempPath), Row(3, 30) :: Row(4, 40) :: Nil)
   }
@@ -62,14 +62,14 @@ class DeleteScalaSuite extends DeleteSuiteBase {
       }
     }
 
-    val deltaTable: io.delta.DeltaTable = {
+    val deltaTable: DeltaTable = {
      val (tableNameOrPath, optionalAlias) = parse(target)
      val isPath: Boolean = tableNameOrPath.startsWith("delta.")
      val table = if (isPath) {
        val path = tableNameOrPath.stripPrefix("delta.`").stripSuffix("`")
-       io.delta.DeltaTable.forPath(spark, path)
+       io.delta.tables.DeltaTable.forPath(spark, path)
      } else {
-       io.delta.DeltaTable(spark.table(tableNameOrPath))
+       io.delta.tables.DeltaTable(spark.table(tableNameOrPath))
      }
      optionalAlias.map(table.as(_)).getOrElse(table)
    }
diff --git a/src/test/scala/org/apache/spark/sql/delta/DeltaVacuumSuite.scala b/src/test/scala/org/apache/spark/sql/delta/DeltaVacuumSuite.scala
index 34d0e8fad04..eae846eca68 100644
--- a/src/test/scala/org/apache/spark/sql/delta/DeltaVacuumSuite.scala
+++ b/src/test/scala/org/apache/spark/sql/delta/DeltaVacuumSuite.scala
@@ -387,7 +387,7 @@ trait DeltaVacuumSuiteBase extends QueryTest with SharedSQLContext with GivenWhe
         checkDatasetUnorderly(result.as[String], qualified: _*)
       case GCScalaApi(expectedDf, retention) =>
         Given("*** Garbage collecting Reservoir using Scala")
-        val deltaTable = io.delta.DeltaTable.forPath(spark, deltaLog.dataPath.toString)
+        val deltaTable = io.delta.tables.DeltaTable.forPath(spark, deltaLog.dataPath.toString)
         val result = if (retention.isDefined) {
           deltaTable.vacuum(retention.get)
         } else {
diff --git a/src/test/scala/org/apache/spark/sql/delta/DescribeDeltaHistorySuite.scala b/src/test/scala/org/apache/spark/sql/delta/DescribeDeltaHistorySuite.scala
index 5cc88507e60..cbc5e9d3ff4 100644
--- a/src/test/scala/org/apache/spark/sql/delta/DescribeDeltaHistorySuite.scala
+++ b/src/test/scala/org/apache/spark/sql/delta/DescribeDeltaHistorySuite.scala
@@ -52,7 +52,7 @@ trait DescribeDeltaHistorySuiteBase
 
   def getHistory(path: String, limit: Option[Int] = None): DataFrame = {
     val deltaLog = DeltaLog.forTable(spark, path)
-    val deltaTable = io.delta.DeltaTable.forPath(spark, deltaLog.dataPath.toString)
+    val deltaTable = io.delta.tables.DeltaTable.forPath(spark, deltaLog.dataPath.toString)
     if (limit.isDefined) {
       deltaTable.history(limit.get)
     } else {
@@ -159,7 +159,7 @@ trait DescribeDeltaHistorySuiteBase
     val tempDir = Utils.createTempDir().toString
     Seq((1, "a"), (2, "3")).toDF("id", "data").write.format("delta").partitionBy("id").save(tempDir)
     val deltaLog = DeltaLog.forTable(spark, tempDir)
-    val deltaTable = io.delta.DeltaTable.forPath(spark, deltaLog.dataPath.toString)
+    val deltaTable = io.delta.tables.DeltaTable.forPath(spark, deltaLog.dataPath.toString)
     deltaTable.delete("id = 1")
 
     checkLastOperation(
diff --git a/src/test/scala/org/apache/spark/sql/delta/MergeIntoScalaSuite.scala b/src/test/scala/org/apache/spark/sql/delta/MergeIntoScalaSuite.scala
index e1fd97b1f57..0b8fac28d9e 100644
--- a/src/test/scala/org/apache/spark/sql/delta/MergeIntoScalaSuite.scala
+++ b/src/test/scala/org/apache/spark/sql/delta/MergeIntoScalaSuite.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.delta
 
 import java.util.Locale
 
-import io.delta._
+import io.delta.tables._
 
 import org.apache.spark.sql._
 import org.apache.spark.sql.types.StructType
@@ -33,7 +33,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
     append(Seq((1, 10), (2, 20)).toDF("key1", "value1"), Nil) // target
     val source = Seq((1, 100), (3, 30)).toDF("key2", "value2") // source
 
-    io.delta.DeltaTable.forPath(spark, tempPath)
+    io.delta.tables.DeltaTable.forPath(spark, tempPath)
       .merge(source, "key1 = key2")
       .whenMatched().updateExpr(Map("key1" -> "key2", "value1" -> "value2"))
       .whenNotMatched().insertExpr(Map("key1" -> "key2", "value1" -> "value2"))
@@ -53,7 +53,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
     append(Seq((1, 10), (2, 20), (4, 40)).toDF("key1", "value1"), Nil) // target
     val source = Seq((1, 100), (3, 30), (4, 41)).toDF("key2", "value2") // source
 
-    io.delta.DeltaTable.forPath(spark, tempPath)
+    io.delta.tables.DeltaTable.forPath(spark, tempPath)
       .merge(source, "key1 = key2")
       .whenMatched("key1 = 4").delete()
       .whenMatched("key2 = 1").updateExpr(Map("key1" -> "key2", "value1" -> "value2"))
@@ -74,7 +74,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
     append(Seq((1, 10), (2, 20), (4, 40)).toDF("key1", "value1"), Nil) // target
     val source = Seq((1, 100), (3, 30), (4, 41)).toDF("key2", "value2") // source
 
-    io.delta.DeltaTable.forPath(spark, tempPath)
+    io.delta.tables.DeltaTable.forPath(spark, tempPath)
       .merge(source, functions.expr("key1 = key2"))
       .whenMatched(functions.expr("key1 = 4")).delete()
       .whenMatched(functions.expr("key2 = 1"))
@@ -120,7 +120,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
   test("update with empty map should do nothing") {
     append(Seq((1, 10), (2, 20)).toDF("trgKey", "trgValue"), Nil) // target
     val source = Seq((1, 100), (3, 30)).toDF("srcKey", "srcValue") // source
-    io.delta.DeltaTable.forPath(spark, tempPath)
+    io.delta.tables.DeltaTable.forPath(spark, tempPath)
       .merge(source, "srcKey = trgKey")
       .whenMatched().updateExpr(Map[String, String]())
       .whenNotMatched().insertExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue"))
@@ -134,7 +134,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
       Nil)
 
     // match condition should not be ignored when map is empty
-    io.delta.DeltaTable.forPath(spark, tempPath)
+    io.delta.tables.DeltaTable.forPath(spark, tempPath)
       .merge(source, "srcKey = trgKey")
       .whenMatched("trgKey = 1").updateExpr(Map[String, String]())
       .whenMatched().delete()
@@ -152,7 +152,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
     append(Seq((1, 10), (2, 20)).toDF("trgKey", "trgValue"), Nil) // target
     val source = Seq((1, 100), (3, 30)).toDF("srcKey", "srcValue") // source
     val e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .whenMatched().updateExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue"))
         .whenNotMatched().insertExpr(Map[String, String]())
@@ -169,7 +169,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
 
     // There must be at least one WHEN clause in a MERGE statement
     var e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .execute()
     }
@@ -178,7 +178,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
     // When there are 2 MATCHED clauses in a MERGE statement,
     // the first MATCHED clause must have a condition
     e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .whenMatched().delete()
         .whenMatched("trgKey = 1").updateExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue"))
@@ -189,7 +189,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
 
     // There must be at most two WHEN clauses in a MERGE statement
     e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .whenMatched("trgKey = 1").updateExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue"))
         .whenMatched("trgValue = 3").delete()
@@ -202,7 +202,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
 
     // INSERT can appear at most once in NOT MATCHED clauses in a MERGE statement
     e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .whenNotMatched().insertExpr(Map("trgKey" -> "srcKey + 1", "trgValue" -> "srcValue"))
         .whenNotMatched().insertExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue"))
@@ -213,7 +213,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
 
     // UPDATE can appear at most once in MATCHED clauses in a MERGE statement
     e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .whenMatched("trgKey = 1").updateExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue"))
         .whenMatched("trgValue = 2")
@@ -226,7 +226,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase {
 
     // DELETE can appear at most once in MATCHED clauses in a MERGE statement
     e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .whenMatched("trgKey = 1").delete()
         .whenMatched("trgValue = 2").delete()
@@ -237,7 +237,7 @@
       "INSERT, UPDATE and DELETE cannot appear twice in one MERGE query")
 
     e = intercept[AnalysisException] {
-      io.delta.DeltaTable.forPath(spark, tempPath)
+      io.delta.tables.DeltaTable.forPath(spark, tempPath)
         .merge(source, "srcKey = trgKey")
         .whenMatched().updateExpr(Map("trgKey" -> "srcKey", "*" -> "*"))
         .whenNotMatched().insertExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue"))
"trgValue" -> "srcValue")) @@ -246,7 +246,7 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase { errorContains(e.getMessage, "cannot resolve `*`") e = intercept[AnalysisException] { - io.delta.DeltaTable.forPath(spark, tempPath) + io.delta.tables.DeltaTable.forPath(spark, tempPath) .merge(source, "srcKey = trgKey") .whenMatched().updateExpr(Map("trgKey" -> "srcKey", "trgValue" -> "srcValue")) .whenNotMatched().insertExpr(Map("*" -> "*")) @@ -394,13 +394,13 @@ class MergeIntoScalaSuite extends MergeIntoSuiteBase { } } - private def makeDeltaTable(nameOrPath: String): io.delta.DeltaTable = { + private def makeDeltaTable(nameOrPath: String): DeltaTable = { val isPath: Boolean = nameOrPath.startsWith("delta.") if (isPath) { val path = nameOrPath.stripPrefix("delta.`").stripSuffix("`") - io.delta.DeltaTable.forPath(spark, path) + io.delta.tables.DeltaTable.forPath(spark, path) } else { - io.delta.DeltaTable(spark.table(nameOrPath)) + io.delta.tables.DeltaTable(spark.table(nameOrPath)) } } diff --git a/src/test/scala/org/apache/spark/sql/delta/UpdateScalaSuite.scala b/src/test/scala/org/apache/spark/sql/delta/UpdateScalaSuite.scala index 6fa1bddd158..7f7b7d42644 100644 --- a/src/test/scala/org/apache/spark/sql/delta/UpdateScalaSuite.scala +++ b/src/test/scala/org/apache/spark/sql/delta/UpdateScalaSuite.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.delta import java.util.Locale -import io.delta.DeltaTable +import io.delta.tables.DeltaTable import org.apache.spark.sql.{functions, Row} @@ -28,7 +28,7 @@ class UpdateScalaSuite extends UpdateSuiteBase { test("update usage test - without condition") { append(Seq((1, 10), (2, 20), (3, 30), (4, 40)).toDF("key", "value")) - val table = io.delta.DeltaTable.forPath(tempPath) + val table = io.delta.tables.DeltaTable.forPath(tempPath) table.updateExpr(Map("key" -> "100")) checkAnswer(readDeltaTable(tempPath), Row(100, 10) :: Row(100, 20) :: Row(100, 30) :: Row(100, 40) :: Nil) @@ -36,7 +36,7 @@ class UpdateScalaSuite extends UpdateSuiteBase { test("update usage test - without condition, using Column") { append(Seq((1, 10), (2, 20), (3, 30), (4, 40)).toDF("key", "value")) - val table = io.delta.DeltaTable.forPath(tempPath) + val table = io.delta.tables.DeltaTable.forPath(tempPath) table.update(Map("key" -> functions.expr("100"))) checkAnswer(readDeltaTable(tempPath), Row(100, 10) :: Row(100, 20) :: Row(100, 30) :: Row(100, 40) :: Nil) @@ -44,7 +44,7 @@ class UpdateScalaSuite extends UpdateSuiteBase { test("update usage test - with condition") { append(Seq((1, 10), (2, 20), (3, 30), (4, 40)).toDF("key", "value")) - val table = io.delta.DeltaTable.forPath(tempPath) + val table = io.delta.tables.DeltaTable.forPath(tempPath) table.updateExpr("key = 1 or key = 2", Map("key" -> "100")) checkAnswer(readDeltaTable(tempPath), Row(100, 10) :: Row(100, 20) :: Row(3, 30) :: Row(4, 40) :: Nil) @@ -52,7 +52,7 @@ class UpdateScalaSuite extends UpdateSuiteBase { test("update usage test - with condition, using Column") { append(Seq((1, 10), (2, 20), (3, 30), (4, 40)).toDF("key", "value")) - val table = io.delta.DeltaTable.forPath(tempPath) + val table = io.delta.tables.DeltaTable.forPath(tempPath) table.update(functions.expr("key = 1 or key = 2"), Map("key" -> functions.expr("100"), "value" -> functions.expr("101"))) checkAnswer(readDeltaTable(tempPath), @@ -90,14 +90,14 @@ class UpdateScalaSuite extends UpdateSuiteBase { } } - val deltaTable: io.delta.DeltaTable = { + val deltaTable: io.delta.tables.DeltaTable = { val (tableNameOrPath, optionalAlias) = 
      val isPath: Boolean = tableNameOrPath.startsWith("delta.")
      val table = if (isPath) {
        val path = tableNameOrPath.stripPrefix("delta.`").stripSuffix("`")
-       io.delta.DeltaTable.forPath(spark, path)
+       io.delta.tables.DeltaTable.forPath(spark, path)
      } else {
-       io.delta.DeltaTable(spark.table(tableNameOrPath))
+       io.delta.tables.DeltaTable(spark.table(tableNameOrPath))
      }
      optionalAlias.map(table.as(_)).getOrElse(table)
    }
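
The renamed package also hosts DeltaMergeBuilder, so the fluent merge API
exercised by the test changes above keeps its shape; only the import moves.
A minimal sketch of a merge under the new namespace, assuming an existing
SparkSession `spark`; the table path, the source data, and the column names
(key1/value1 in the target, key2/value2 in the source) are illustrative:

    import io.delta.tables.DeltaTable
    import spark.implicits._

    // Bind to an existing Delta table on disk (illustrative path).
    val target = DeltaTable.forPath(spark, "/tmp/delta/target")

    // An in-memory source to merge into the target (illustrative data).
    val source = Seq((1, 100), (3, 30)).toDF("key2", "value2")

    // Update matched rows and insert unmatched ones; these are the same
    // builder calls the MergeIntoScalaSuite tests use after the rename.
    target.merge(source, "key1 = key2")
      .whenMatched().updateExpr(Map("key1" -> "key2", "value1" -> "value2"))
      .whenNotMatched().insertExpr(Map("key1" -> "key2", "value1" -> "value2"))
      .execute()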