Skip to content

Commit

Permalink
merge conflict
Browse files Browse the repository at this point in the history
  • Loading branch information
huan233usc committed Feb 28, 2025
2 parents 24d1682 + 0fbe9b9 commit 1bdfdf0
Show file tree
Hide file tree
Showing 174 changed files with 8,080 additions and 4,127 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/connectors_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
distribution: 'zulu'
java-version: '8'
- name: Cache Scala, SBT
uses: actions/cache@v2
uses: actions/cache@v4
with:
path: |
~/.sbt
Expand Down
48 changes: 48 additions & 0 deletions .scalafmt.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Copyright (2025) The Delta Lake Project Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Disable all token alignment (vertical alignment churns diffs on unrelated lines).
align = none
align.openParenDefnSite = false
align.openParenCallSite = false
align.tokens = []
# Force each import selector group onto a single line.
importSelectors = "singleLine"
# Do not infer "config style" formatting from existing argument layout.
optIn.configStyleArguments = false
# 2-space continuation indent at call sites, 4-space at definition sites
# (matches the common Scala/Spark style guide).
continuationIndent {
callSite = 2
defnSite = 4
}
# Keep closing parens on the same line as the last argument/parameter.
danglingParentheses {
defnSite = false
callSite = false
}
# Javadoc-style asterisk docstrings; never rewrap docstring text.
docstrings {
style = Asterisk
wrap = no
}
# Hex literals rendered with uppercase digits (0xFF, not 0xff).
literals.hexDigits = upper
maxColumn = 100
newlines {
beforeCurlyLambdaParams = false
# Preserve the author's existing line-break decisions.
source = keep
}
# Only the import-sorting rewrite is enabled; groups below are matched in order,
# so java, scala, io.delta, then Spark Delta imports form separate blocks.
rewrite.rules = [Imports]
rewrite.imports.sort = scalastyle
rewrite.imports.groups = [
["java\\..*"],
["scala\\..*"],
["io\\.delta\\..*"],
["org\\.apache\\.spark\\.sql\\.delta.*"]
]
# Parse sources with the Scala 2.12 dialect.
runner.dialect = scala212
# Pinned scalafmt version; keep in sync with the sbt-scalafmt plugin.
version = 3.8.6
79 changes: 49 additions & 30 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -157,11 +157,26 @@ lazy val commonSettings = Seq(
unidocSourceFilePatterns := Nil,
)

// enforce java code style
def javafmtCheckSettings() = Seq(
////////////////////////////
// START: Code Formatting //
////////////////////////////

/**
 * Settings that enforce Java code style: makes `Compile / compile` depend on
 * `Compile / javafmtCheckAll`, so a formatting violation fails the build.
 */
def javafmtCheckSettings(): Seq[Def.Setting[Task[CompileAnalysis]]] = Seq(
  (Compile / compile) := (Compile / compile).dependsOn(Compile / javafmtCheckAll).value
)

/**
 * Settings that enforce Scala code style: makes `Compile / compile` depend on
 * `Compile / scalafmtCheckAll`, so a formatting violation fails the build.
 */
def scalafmtCheckSettings(): Seq[Def.Setting[Task[CompileAnalysis]]] = Seq(
  (Compile / compile) := (Compile / compile).dependsOn(Compile / scalafmtCheckAll).value
)

// TODO: define fmtAll and fmtCheckAll tasks that run both scala and java fmts/checks

//////////////////////////
// END: Code Formatting //
//////////////////////////

/**
* Note: we cannot access sparkVersion.value here, since that can only be used within a task or
* setting macro.
Expand Down Expand Up @@ -233,7 +248,7 @@ def runTaskOnlyOnSparkMaster[T](
}

lazy val connectCommon = (project in file("spark-connect/common"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-connect-common",
commonSettings,
Expand Down Expand Up @@ -272,7 +287,7 @@ lazy val connectCommon = (project in file("spark-connect/common"))
)

lazy val connectClient = (project in file("spark-connect/client"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.dependsOn(connectCommon % "compile->compile;test->test;provided->provided")
.settings(
name := "delta-connect-client",
Expand Down Expand Up @@ -361,7 +376,7 @@ lazy val connectClient = (project in file("spark-connect/client"))
lazy val connectServer = (project in file("spark-connect/server"))
.dependsOn(connectCommon % "compile->compile;test->test;provided->provided")
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-connect-server",
commonSettings,
Expand Down Expand Up @@ -405,7 +420,7 @@ lazy val connectServer = (project in file("spark-connect/server"))
lazy val spark = (project in file("spark"))
.dependsOn(storage)
.enablePlugins(Antlr4Plugin)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-spark",
commonSettings,
Expand Down Expand Up @@ -493,7 +508,7 @@ lazy val spark = (project in file("spark"))

lazy val contribs = (project in file("contribs"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-contribs",
commonSettings,
Expand Down Expand Up @@ -532,7 +547,7 @@ lazy val contribs = (project in file("contribs"))

lazy val sharing = (project in file("sharing"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-sharing-spark",
commonSettings,
Expand All @@ -558,12 +573,14 @@ lazy val sharing = (project in file("sharing"))
).configureUnidoc()

lazy val kernelApi = (project in file("kernel/kernel-api"))
.enablePlugins(ScalafmtPlugin)
.settings(
name := "delta-kernel-api",
commonSettings,
scalaStyleSettings,
javaOnlyReleaseSettings,
javafmtCheckSettings,
scalafmtCheckSettings,
Test / javaOptions ++= Seq("-ea"),
libraryDependencies ++= Seq(
"org.roaringbitmap" % "RoaringBitmap" % "0.9.25",
Expand Down Expand Up @@ -638,6 +655,7 @@ lazy val kernelApi = (project in file("kernel/kernel-api"))
).configureUnidoc(docTitle = "Delta Kernel")

lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
.enablePlugins(ScalafmtPlugin)
.dependsOn(kernelApi)
.dependsOn(kernelApi % "test->test")
.dependsOn(storage)
Expand All @@ -650,6 +668,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
scalaStyleSettings,
javaOnlyReleaseSettings,
javafmtCheckSettings,
scalafmtCheckSettings,
Test / javaOptions ++= Seq("-ea"),
libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-client-runtime" % hadoopVersion,
Expand Down Expand Up @@ -682,7 +701,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
// TODO unidoc
// TODO(scott): figure out a better way to include tests in this project
lazy val storage = (project in file("storage"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-storage",
commonSettings,
Expand All @@ -707,7 +726,7 @@ lazy val storage = (project in file("storage"))
lazy val storageS3DynamoDB = (project in file("storage-s3-dynamodb"))
.dependsOn(storage % "compile->compile;test->test;provided->provided")
.dependsOn(spark % "test->test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-storage-s3-dynamodb",
commonSettings,
Expand All @@ -733,7 +752,7 @@ val icebergSparkRuntimeArtifactName = {
lazy val testDeltaIcebergJar = (project in file("testDeltaIcebergJar"))
// delta-iceberg depends on delta-spark! So, we need to include it during our test.
.dependsOn(spark % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-delta-iceberg-jar",
commonSettings,
Expand Down Expand Up @@ -763,7 +782,7 @@ val deltaIcebergSparkIncludePrefixes = Seq(
// scalastyle:off println
lazy val iceberg = (project in file("iceberg"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-iceberg",
commonSettings,
Expand Down Expand Up @@ -833,7 +852,7 @@ lazy val generateIcebergJarsTask = TaskKey[Unit]("generateIcebergJars", "Generat

lazy val icebergShaded = (project in file("icebergShaded"))
.dependsOn(spark % "provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "iceberg-shaded",
commonSettings,
Expand Down Expand Up @@ -864,7 +883,7 @@ lazy val icebergShaded = (project in file("icebergShaded"))

lazy val hudi = (project in file("hudi"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-hudi",
commonSettings,
Expand Down Expand Up @@ -916,7 +935,7 @@ lazy val hudi = (project in file("hudi"))

lazy val hive = (project in file("connectors/hive"))
.dependsOn(standaloneCosmetic)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-hive",
commonSettings,
Expand All @@ -933,7 +952,7 @@ lazy val hive = (project in file("connectors/hive"))

lazy val hiveAssembly = (project in file("connectors/hive-assembly"))
.dependsOn(hive)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-hive-assembly",
Compile / unmanagedJars += (hive / Compile / packageBin / packageBin).value,
Expand All @@ -960,7 +979,7 @@ lazy val hiveAssembly = (project in file("connectors/hive-assembly"))

lazy val hiveTest = (project in file("connectors/hive-test"))
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-test",
// Make the project use the assembly jar to ensure we are testing the assembly jar that users
Expand Down Expand Up @@ -993,7 +1012,7 @@ lazy val hiveTest = (project in file("connectors/hive-test"))

lazy val hiveMR = (project in file("connectors/hive-mr"))
.dependsOn(hiveTest % "test->test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-mr",
commonSettings,
Expand All @@ -1020,7 +1039,7 @@ lazy val hiveMR = (project in file("connectors/hive-mr"))

lazy val hiveTez = (project in file("connectors/hive-tez"))
.dependsOn(hiveTest % "test->test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-tez",
commonSettings,
Expand Down Expand Up @@ -1064,7 +1083,7 @@ lazy val hiveTez = (project in file("connectors/hive-tez"))

lazy val hive2MR = (project in file("connectors/hive2-mr"))
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive2-mr",
commonSettings,
Expand Down Expand Up @@ -1095,7 +1114,7 @@ lazy val hive2MR = (project in file("connectors/hive2-mr"))

lazy val hive2Tez = (project in file("connectors/hive2-tez"))
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive2-tez",
commonSettings,
Expand Down Expand Up @@ -1162,7 +1181,7 @@ lazy val hive2Tez = (project in file("connectors/hive2-tez"))
*/
lazy val standaloneCosmetic = project
.dependsOn(storage) // this doesn't impact the output artifact (jar), only the pom.xml dependencies
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone",
commonSettings,
Expand All @@ -1182,7 +1201,7 @@ lazy val standaloneCosmetic = project
lazy val testStandaloneCosmetic = (project in file("connectors/testStandaloneCosmetic"))
.dependsOn(standaloneCosmetic)
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-standalone-cosmetic",
commonSettings,
Expand All @@ -1199,7 +1218,7 @@ lazy val testStandaloneCosmetic = (project in file("connectors/testStandaloneCos
* except `ParquetSchemaConverter` are working without `parquet-hadoop` in testStandaloneCosmetic`.
*/
lazy val testParquetUtilsWithStandaloneCosmetic = project.dependsOn(standaloneCosmetic)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-parquet-utils-with-standalone-cosmetic",
commonSettings,
Expand All @@ -1223,7 +1242,7 @@ def scalaCollectionPar(version: String) = version match {
* create a separate project to skip the shading.
*/
lazy val standaloneParquet = (project in file("connectors/standalone-parquet"))
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.dependsOn(standaloneWithoutParquetUtils)
.settings(
name := "delta-standalone-parquet",
Expand All @@ -1238,7 +1257,7 @@ lazy val standaloneParquet = (project in file("connectors/standalone-parquet"))

/** A dummy project to allow `standaloneParquet` depending on the shaded standalone jar. */
lazy val standaloneWithoutParquetUtils = project
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone-without-parquet-utils",
commonSettings,
Expand All @@ -1251,7 +1270,7 @@ lazy val standaloneWithoutParquetUtils = project
lazy val standalone = (project in file("connectors/standalone"))
.dependsOn(storage % "compile->compile;provided->provided")
.dependsOn(goldenTables % "test")
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone-original",
commonSettings,
Expand Down Expand Up @@ -1376,7 +1395,7 @@ lazy val compatibility = (project in file("connectors/oss-compatibility-tests"))

lazy val goldenTables = (project in file("connectors/golden-tables"))
.dependsOn(spark % "test") // depends on delta-spark
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "golden-tables",
commonSettings,
Expand All @@ -1403,7 +1422,7 @@ def sqlDeltaImportScalaVersion(scalaBinaryVersion: String): String = {

lazy val sqlDeltaImport = (project in file("connectors/sql-delta-import"))
.dependsOn(spark)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "sql-delta-import",
commonSettings,
Expand Down Expand Up @@ -1435,7 +1454,7 @@ lazy val flink = (project in file("connectors/flink"))
.dependsOn(standaloneCosmetic % "provided")
.dependsOn(kernelApi)
.dependsOn(kernelDefaults)
.disablePlugins(JavaFormatterPlugin)
.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-flink",
commonSettings,
Expand Down
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"txnId":"bda32d72-442d-4705-9a8c-16093eb31744","tableSizeBytes":452,"numFiles":1,"numMetadata":1,"numProtocol":1,"setTransactions":[],"domainMetadata":[],"metadata":{"id":"da00fe29-8b6e-4f3b-b91f-a3729283bc1a","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"month\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":["month"],"configuration":{"delta.enableChangeDataFeed":"true"},"createdTime":1740185389028},"protocol":{"minReaderVersion":1,"minWriterVersion":7,"writerFeatures":["changeDataFeed","appendOnly","invariants"]},"allFiles":[{"path":"month=1/part-00000-22d25ea7-a383-44df-ad22-6b06d871b547.c000.snappy.parquet","partitionValues":{"month":"1"},"size":452,"modificationTime":1740185390672,"dataChange":false,"stats":"{\"numRecords\":1,\"minValues\":{\"id\":1},\"maxValues\":{\"id\":1},\"nullCount\":{\"id\":0}}"}]}
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
{"commitInfo":{"timestamp":1740185390903,"operation":"CREATE TABLE AS SELECT","operationParameters":{"partitionBy":"[\"month\"]","clusterBy":"[]","description":null,"isManaged":"false","properties":"{\"delta.enableChangeDataFeed\":\"true\"}"},"isolationLevel":"Serializable","isBlindAppend":true,"operationMetrics":{"numFiles":"1","numOutputRows":"1","numOutputBytes":"452"},"engineInfo":"Apache-Spark/3.5.3 Delta-Lake/3.4.0-SNAPSHOT","txnId":"bda32d72-442d-4705-9a8c-16093eb31744"}}
{"metaData":{"id":"da00fe29-8b6e-4f3b-b91f-a3729283bc1a","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"month\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":["month"],"configuration":{"delta.enableChangeDataFeed":"true"},"createdTime":1740185389028}}
{"protocol":{"minReaderVersion":1,"minWriterVersion":7,"writerFeatures":["changeDataFeed","appendOnly","invariants"]}}
{"add":{"path":"month=1/part-00000-22d25ea7-a383-44df-ad22-6b06d871b547.c000.snappy.parquet","partitionValues":{"month":"1"},"size":452,"modificationTime":1740185390672,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"id\":1},\"maxValues\":{\"id\":1},\"nullCount\":{\"id\":0}}"}}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"txnId":"0d7d28b8-55c2-4d8b-b48e-88b22c90aed1","tableSizeBytes":904,"numFiles":2,"numMetadata":1,"numProtocol":1,"setTransactions":[],"domainMetadata":[],"metadata":{"id":"da00fe29-8b6e-4f3b-b91f-a3729283bc1a","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"month\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":["month"],"configuration":{"delta.enableChangeDataFeed":"true"},"createdTime":1740185389028},"protocol":{"minReaderVersion":1,"minWriterVersion":7,"writerFeatures":["changeDataFeed","appendOnly","invariants"]},"allFiles":[{"path":"month=2/part-00000-cc2a9650-0450-4879-9757-873b7f544510.c000.snappy.parquet","partitionValues":{"month":"2"},"size":452,"modificationTime":1740185395663,"dataChange":false,"stats":"{\"numRecords\":1,\"minValues\":{\"id\":2},\"maxValues\":{\"id\":2},\"nullCount\":{\"id\":0}}"},{"path":"month=1/part-00000-22d25ea7-a383-44df-ad22-6b06d871b547.c000.snappy.parquet","partitionValues":{"month":"1"},"size":452,"modificationTime":1740185390672,"dataChange":false,"stats":"{\"numRecords\":1,\"minValues\":{\"id\":1},\"maxValues\":{\"id\":1},\"nullCount\":{\"id\":0}}"}]}
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
{"commitInfo":{"timestamp":1740185395669,"operation":"WRITE","operationParameters":{"mode":"Append","partitionBy":"[]"},"readVersion":0,"isolationLevel":"Serializable","isBlindAppend":true,"operationMetrics":{"numFiles":"1","numOutputRows":"1","numOutputBytes":"452"},"engineInfo":"Apache-Spark/3.5.3 Delta-Lake/3.4.0-SNAPSHOT","txnId":"0d7d28b8-55c2-4d8b-b48e-88b22c90aed1"}}
{"add":{"path":"month=2/part-00000-cc2a9650-0450-4879-9757-873b7f544510.c000.snappy.parquet","partitionValues":{"month":"2"},"size":452,"modificationTime":1740185395663,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"id\":2},\"maxValues\":{\"id\":2},\"nullCount\":{\"id\":0}}"}}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"txnId":"79b3e3aa-82dc-4c18-b95e-8b50089b55c7","tableSizeBytes":904,"numFiles":2,"numMetadata":1,"numProtocol":1,"setTransactions":[],"domainMetadata":[],"metadata":{"id":"da00fe29-8b6e-4f3b-b91f-a3729283bc1a","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"month\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":["month"],"configuration":{"delta.enableChangeDataFeed":"true"},"createdTime":1740185389028},"protocol":{"minReaderVersion":1,"minWriterVersion":7,"writerFeatures":["changeDataFeed","appendOnly","invariants"]},"allFiles":[{"path":"month=2/part-00000-129a0441-5f41-4e46-be33-fd0289e53614.c000.snappy.parquet","partitionValues":{"month":"2"},"size":452,"modificationTime":1740185397380,"dataChange":false,"stats":"{\"numRecords\":1,\"minValues\":{\"id\":2},\"maxValues\":{\"id\":2},\"nullCount\":{\"id\":0}}"},{"path":"month=1/part-00000-c5babbd8-6013-484c-818f-22d546976866.c000.snappy.parquet","partitionValues":{"month":"1"},"size":452,"modificationTime":1740185397384,"dataChange":false,"stats":"{\"numRecords\":1,\"minValues\":{\"id\":1},\"maxValues\":{\"id\":1},\"nullCount\":{\"id\":0}}"}]}
Loading

0 comments on commit 1bdfdf0

Please sign in to comment.