Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[BUILD] Configure scalafmt. Currently, this applies only to Kernel. #4160

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions .scalafmt.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Disable all vertical alignment so reformat diffs stay minimal.
align = none
align.openParenDefnSite = false
align.openParenCallSite = false
align.tokens = []
# Force each import's selector list onto a single line.
importSelectors = "singleLine"
# Do not infer "config style" (one-arg-per-line) from existing formatting.
optIn.configStyleArguments = false
# Continuation indent: 2 spaces at call sites, 4 at definition sites.
continuationIndent {
callSite = 2
defnSite = 4
}
# No dangling closing parentheses on their own line.
danglingParentheses {
defnSite = false
callSite = false
}
# Javadoc-style docstrings ("Asterisk"); never rewrap docstring text.
docstrings {
style = Asterisk
wrap = no
}
# Uppercase hex digits in literals, e.g. 0xFF rather than 0xff.
literals.hexDigits = upper
maxColumn = 100
newlines {
# Lambda params stay on the same line as the opening curly brace.
beforeCurlyLambdaParams = false
# Preserve the author's existing line breaks where legal.
source = keep
}
# Parse sources with the Scala 2.12 dialect.
runner.dialect = scala212
# Pinned scalafmt release used by the sbt-scalafmt plugin.
version = 3.8.6
79 changes: 49 additions & 30 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -157,11 +157,26 @@ lazy val commonSettings = Seq(
unidocSourceFilePatterns := Nil,
)

// enforce java code style
def javafmtCheckSettings() = Seq(
////////////////////////////
// START: Code Formatting //
////////////////////////////

/**
 * Settings that gate compilation on the Java formatter: `Compile / compile`
 * will not run until `Compile / javafmtCheckAll` has passed.
 */
def javafmtCheckSettings(): Seq[Def.Setting[Task[CompileAnalysis]]] = Seq(
  (Compile / compile) := (Compile / compile).dependsOn(Compile / javafmtCheckAll).value
)

/** Enforce scala code style on compile. */
def scalafmtCheckSettings(): Seq[Def.Setting[Task[CompileAnalysis]]] = Seq(
(Compile / compile) := ((Compile / compile) dependsOn (Compile / scalafmtCheckAll)).value,
)

// TODO: define fmtAll and fmtCheckAll tasks that run both scala and java fmts/checks

//////////////////////////
// END: Code Formatting //
//////////////////////////

/**
* Note: we cannot access sparkVersion.value here, since that can only be used within a task or
* setting macro.
Expand Down Expand Up @@ -237,7 +252,7 @@ def runTaskOnlyOnSparkMaster[T](
}

lazy val connectCommon = (project in file("spark-connect/common"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-connect-common",
commonSettings,
Expand Down Expand Up @@ -276,7 +291,7 @@ lazy val connectCommon = (project in file("spark-connect/common"))
)

lazy val connectClient = (project in file("spark-connect/client"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.dependsOn(connectCommon % "compile->compile;test->test;provided->provided")
.settings(
name := "delta-connect-client",
Expand Down Expand Up @@ -365,7 +380,7 @@ lazy val connectClient = (project in file("spark-connect/client"))
lazy val connectServer = (project in file("spark-connect/server"))
.dependsOn(connectCommon % "compile->compile;test->test;provided->provided")
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-connect-server",
commonSettings,
Expand Down Expand Up @@ -409,7 +424,7 @@ lazy val connectServer = (project in file("spark-connect/server"))
lazy val spark = (project in file("spark"))
.dependsOn(storage)
.enablePlugins(Antlr4Plugin)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-spark",
commonSettings,
Expand Down Expand Up @@ -497,7 +512,7 @@ lazy val spark = (project in file("spark"))

lazy val contribs = (project in file("contribs"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-contribs",
commonSettings,
Expand Down Expand Up @@ -536,7 +551,7 @@ lazy val contribs = (project in file("contribs"))

lazy val sharing = (project in file("sharing"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-sharing-spark",
commonSettings,
Expand All @@ -562,12 +577,14 @@ lazy val sharing = (project in file("sharing"))
).configureUnidoc()

lazy val kernelApi = (project in file("kernel/kernel-api"))
.enablePlugins(ScalafmtPlugin)
.settings(
name := "delta-kernel-api",
commonSettings,
scalaStyleSettings,
javaOnlyReleaseSettings,
javafmtCheckSettings,
scalafmtCheckSettings,
Test / javaOptions ++= Seq("-ea"),
libraryDependencies ++= Seq(
"org.roaringbitmap" % "RoaringBitmap" % "0.9.25",
Expand Down Expand Up @@ -642,6 +659,7 @@ lazy val kernelApi = (project in file("kernel/kernel-api"))
).configureUnidoc(docTitle = "Delta Kernel")

lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
.enablePlugins(ScalafmtPlugin)
.dependsOn(kernelApi)
.dependsOn(kernelApi % "test->test")
.dependsOn(storage)
Expand All @@ -654,6 +672,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
scalaStyleSettings,
javaOnlyReleaseSettings,
javafmtCheckSettings,
scalafmtCheckSettings,
Test / javaOptions ++= Seq("-ea"),
libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-client-runtime" % hadoopVersion,
Expand Down Expand Up @@ -686,7 +705,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
// TODO unidoc
// TODO(scott): figure out a better way to include tests in this project
lazy val storage = (project in file("storage"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-storage",
commonSettings,
Expand All @@ -711,7 +730,7 @@ lazy val storage = (project in file("storage"))
lazy val storageS3DynamoDB = (project in file("storage-s3-dynamodb"))
.dependsOn(storage % "compile->compile;test->test;provided->provided")
.dependsOn(spark % "test->test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-storage-s3-dynamodb",
commonSettings,
Expand All @@ -737,7 +756,7 @@ val icebergSparkRuntimeArtifactName = {
lazy val testDeltaIcebergJar = (project in file("testDeltaIcebergJar"))
// delta-iceberg depends on delta-spark! So, we need to include it during our test.
.dependsOn(spark % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-delta-iceberg-jar",
commonSettings,
Expand Down Expand Up @@ -767,7 +786,7 @@ val deltaIcebergSparkIncludePrefixes = Seq(
// scalastyle:off println
lazy val iceberg = (project in file("iceberg"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-iceberg",
commonSettings,
Expand Down Expand Up @@ -837,7 +856,7 @@ lazy val generateIcebergJarsTask = TaskKey[Unit]("generateIcebergJars", "Generat

lazy val icebergShaded = (project in file("icebergShaded"))
.dependsOn(spark % "provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "iceberg-shaded",
commonSettings,
Expand Down Expand Up @@ -868,7 +887,7 @@ lazy val icebergShaded = (project in file("icebergShaded"))

lazy val hudi = (project in file("hudi"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-hudi",
commonSettings,
Expand Down Expand Up @@ -920,7 +939,7 @@ lazy val hudi = (project in file("hudi"))

lazy val hive = (project in file("connectors/hive"))
.dependsOn(standaloneCosmetic)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-hive",
commonSettings,
Expand All @@ -937,7 +956,7 @@ lazy val hive = (project in file("connectors/hive"))

lazy val hiveAssembly = (project in file("connectors/hive-assembly"))
.dependsOn(hive)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-hive-assembly",
Compile / unmanagedJars += (hive / Compile / packageBin / packageBin).value,
Expand All @@ -964,7 +983,7 @@ lazy val hiveAssembly = (project in file("connectors/hive-assembly"))

lazy val hiveTest = (project in file("connectors/hive-test"))
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-test",
// Make the project use the assembly jar to ensure we are testing the assembly jar that users
Expand Down Expand Up @@ -997,7 +1016,7 @@ lazy val hiveTest = (project in file("connectors/hive-test"))

lazy val hiveMR = (project in file("connectors/hive-mr"))
.dependsOn(hiveTest % "test->test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-mr",
commonSettings,
Expand All @@ -1024,7 +1043,7 @@ lazy val hiveMR = (project in file("connectors/hive-mr"))

lazy val hiveTez = (project in file("connectors/hive-tez"))
.dependsOn(hiveTest % "test->test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-tez",
commonSettings,
Expand Down Expand Up @@ -1068,7 +1087,7 @@ lazy val hiveTez = (project in file("connectors/hive-tez"))

lazy val hive2MR = (project in file("connectors/hive2-mr"))
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive2-mr",
commonSettings,
Expand Down Expand Up @@ -1099,7 +1118,7 @@ lazy val hive2MR = (project in file("connectors/hive2-mr"))

lazy val hive2Tez = (project in file("connectors/hive2-tez"))
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive2-tez",
commonSettings,
Expand Down Expand Up @@ -1166,7 +1185,7 @@ lazy val hive2Tez = (project in file("connectors/hive2-tez"))
*/
lazy val standaloneCosmetic = project
.dependsOn(storage) // this doesn't impact the output artifact (jar), only the pom.xml dependencies
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone",
commonSettings,
Expand All @@ -1186,7 +1205,7 @@ lazy val standaloneCosmetic = project
lazy val testStandaloneCosmetic = (project in file("connectors/testStandaloneCosmetic"))
.dependsOn(standaloneCosmetic)
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-standalone-cosmetic",
commonSettings,
Expand All @@ -1203,7 +1222,7 @@ lazy val testStandaloneCosmetic = (project in file("connectors/testStandaloneCos
* except `ParquetSchemaConverter` are working without `parquet-hadoop` in testStandaloneCosmetic`.
*/
lazy val testParquetUtilsWithStandaloneCosmetic = project.dependsOn(standaloneCosmetic)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-parquet-utils-with-standalone-cosmetic",
commonSettings,
Expand All @@ -1227,7 +1246,7 @@ def scalaCollectionPar(version: String) = version match {
* create a separate project to skip the shading.
*/
lazy val standaloneParquet = (project in file("connectors/standalone-parquet"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.dependsOn(standaloneWithoutParquetUtils)
.settings(
name := "delta-standalone-parquet",
Expand All @@ -1242,7 +1261,7 @@ lazy val standaloneParquet = (project in file("connectors/standalone-parquet"))

/** A dummy project to allow `standaloneParquet` depending on the shaded standalone jar. */
lazy val standaloneWithoutParquetUtils = project
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone-without-parquet-utils",
commonSettings,
Expand All @@ -1255,7 +1274,7 @@ lazy val standaloneWithoutParquetUtils = project
lazy val standalone = (project in file("connectors/standalone"))
.dependsOn(storage % "compile->compile;provided->provided")
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone-original",
commonSettings,
Expand Down Expand Up @@ -1380,7 +1399,7 @@ lazy val compatibility = (project in file("connectors/oss-compatibility-tests"))

lazy val goldenTables = (project in file("connectors/golden-tables"))
.dependsOn(spark % "test") // depends on delta-spark
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "golden-tables",
commonSettings,
Expand All @@ -1407,7 +1426,7 @@ def sqlDeltaImportScalaVersion(scalaBinaryVersion: String): String = {

lazy val sqlDeltaImport = (project in file("connectors/sql-delta-import"))
.dependsOn(spark)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "sql-delta-import",
commonSettings,
Expand Down Expand Up @@ -1439,7 +1458,7 @@ lazy val flink = (project in file("connectors/flink"))
.dependsOn(standaloneCosmetic % "provided")
.dependsOn(kernelApi)
.dependsOn(kernelDefaults)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-flink",
commonSettings,
Expand Down
3 changes: 3 additions & 0 deletions kernel/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,9 @@ We welcome contributions to Delta Lake and we accept contributions via Pull Requ
Java code adheres to the [Google style](https://google.github.io/styleguide/javaguide.html), which is verified via `build/sbt javafmtCheckAll` during builds.
In order to automatically fix Java code style issues, please use `build/sbt javafmtAll`.

Scala code adheres to the style defined in the root `.scalafmt.conf` file, which is verified via `build/sbt scalafmtCheckAll` during builds. You should configure your IDE to use the `Scalafmt` formatter and point it at this configuration file.
In order to automatically fix Scala code style issues, please use `build/sbt scalafmtAll`.

## Configuring Code Formatter for Eclipse/IntelliJ

Follow the instructions for [Eclipse](https://github.com/google/google-java-format#eclipse) or
Expand Down
Loading