From 5b3f7bca8df52a4f146b2dba6d1cc80e642bdd0c Mon Sep 17 00:00:00 2001
From: Adam Binford
Date: Tue, 24 Sep 2024 10:03:13 -0400
Subject: [PATCH] fix: CometScanExec on Spark 3.5.2 (#915)

---
 .../spark/sql/comet/CometScanExec.scala | 26 +++++++++++- .../q1/explain.txt | 12 +++--- .../q10/explain.txt | 16 ++++---- .../q11/explain.txt | 24 +++++------ .../q12/explain.txt | 8 ++-- .../q13/explain.txt | 14 +++---- .../q14a/explain.txt | 34 ++++++++-------- .../q14b/explain.txt | 40 +++++++++---------- .../q15/explain.txt | 10 ++--- .../q16/explain.txt | 12 +++--- .../q17/explain.txt | 18 ++++----- .../q18/explain.txt | 16 ++++---- .../q19/explain.txt | 12 +++--- .../q2/explain.txt | 10 ++--- .../q20/explain.txt | 8 ++-- .../q21/explain.txt | 10 ++--- .../q22/explain.txt | 10 ++--- .../q23a/explain.txt | 26 ++++++------ .../q23b/explain.txt | 28 ++++++------- .../q24a/explain.txt | 14 +++---- .../q24b/explain.txt | 14 +++---- .../q25/explain.txt | 18 ++++----- .../q26/explain.txt | 12 +++--- .../q27/explain.txt | 12 +++--- .../q28/explain.txt | 12 +++--- .../q29/explain.txt | 22 +++++----- .../q3/explain.txt | 6 +-- .../q30/explain.txt | 14 +++---- .../q31/explain.txt | 26 ++++++------ .../q32/explain.txt | 10 ++--- .../q33/explain.txt | 16 ++++---- .../q34/explain.txt | 12 +++--- .../q35/explain.txt | 16 ++++---- .../q36/explain.txt | 10 ++--- .../q37/explain.txt | 10 ++--- .../q38/explain.txt | 12 +++--- .../q39a/explain.txt | 16 ++++---- .../q39b/explain.txt | 16 ++++---- .../q4/explain.txt | 32 +++++++-------- .../q40/explain.txt | 12 +++--- .../q41/explain.txt | 4 +- .../q42/explain.txt | 6 +-- .../q43/explain.txt | 6 +-- .../q44/explain.txt | 6 +-- .../q45/explain.txt | 14 +++---- .../q46/explain.txt | 14 +++---- .../q47/explain.txt | 10 ++--- .../q48/explain.txt | 12 +++--- .../q49/explain.txt | 16 ++++---- .../q5/explain.txt | 24 +++++------ .../q50/explain.txt | 12 +++--- .../q51/explain.txt | 8 ++-- .../q52/explain.txt | 6 +-- .../q53/explain.txt | 10 ++--- .../q54/explain.txt | 26 ++++++------ .../q55/explain.txt | 6 +-- .../q56/explain.txt | 16 ++++---- .../q57/explain.txt | 10 ++--- .../q58/explain.txt | 16 ++++---- .../q59/explain.txt | 12 +++--- .../q6/explain.txt | 16 ++++---- .../q60/explain.txt | 16 ++++---- .../q61/explain.txt | 18 ++++----- .../q62/explain.txt | 10 ++--- .../q63/explain.txt | 10 ++--- .../q64/explain.txt | 38 +++++++++--------- .../q65/explain.txt | 12 +++--- .../q66/explain.txt | 14 +++---- .../q67/explain.txt | 10 ++--- .../q68/explain.txt | 14 +++---- .../q69/explain.txt | 16 ++++---- .../q7/explain.txt | 12 +++--- .../q70/explain.txt | 12 +++--- .../q71/explain.txt | 14 +++---- .../q72/explain.txt | 24 +++++------ .../q73/explain.txt | 12 +++--- .../q74/explain.txt | 24 +++++------ .../q75/explain.txt | 28 ++++++------- .../q76/explain.txt | 10 ++--- .../q77/explain.txt | 20 +++++----- .../q78/explain.txt | 16 ++++---- .../q79/explain.txt | 12 +++--- .../q8/explain.txt | 14 +++---- .../q80/explain.txt | 26 ++++++------ .../q81/explain.txt | 14 +++---- .../q82/explain.txt | 10 ++--- .../q83/explain.txt | 16 ++++---- .../q84/explain.txt | 12 +++--- .../q85/explain.txt | 18 ++++----- .../q86/explain.txt | 8 ++-- .../q87/explain.txt | 12 +++--- .../q88/explain.txt | 36 ++++++++--------- .../q89/explain.txt | 10 ++--- .../q9/explain.txt | 12 +++--- .../q90/explain.txt | 12 +++--- .../q91/explain.txt | 16 ++++---- .../q92/explain.txt | 10 ++--- .../q93/explain.txt | 6 +-- .../q94/explain.txt | 12 +++--- .../q95/explain.txt | 12 +++--- .../q96/explain.txt | 8
++-- .../q97/explain.txt | 8 ++-- .../q98/explain.txt | 8 ++-- .../q99/explain.txt | 10 ++--- .../q1/explain.txt | 12 +++--- .../q10/explain.txt | 16 ++++---- .../q11/explain.txt | 24 +++++------ .../q12/explain.txt | 8 ++-- .../q13/explain.txt | 14 +++---- .../q14a/explain.txt | 34 ++++++++-------- .../q14b/explain.txt | 40 +++++++++---------- .../q15/explain.txt | 10 ++--- .../q16/explain.txt | 12 +++--- .../q17/explain.txt | 18 ++++----- .../q18/explain.txt | 16 ++++---- .../q19/explain.txt | 12 +++--- .../q2/explain.txt | 10 ++--- .../q20/explain.txt | 8 ++-- .../q21/explain.txt | 10 ++--- .../q22/explain.txt | 10 ++--- .../q23a/explain.txt | 24 +++++------ .../q23b/explain.txt | 26 ++++++------ .../q24a/explain.txt | 14 +++---- .../q24b/explain.txt | 14 +++---- .../q25/explain.txt | 18 ++++----- .../q26/explain.txt | 12 +++--- .../q27/explain.txt | 12 +++--- .../q28/explain.txt | 12 +++--- .../q29/explain.txt | 22 +++++----- .../q3/explain.txt | 6 +-- .../q30/explain.txt | 14 +++---- .../q31/explain.txt | 26 ++++++------ .../q32/explain.txt | 10 ++--- .../q33/explain.txt | 16 ++++---- .../q34/explain.txt | 12 +++--- .../q35/explain.txt | 16 ++++---- .../q36/explain.txt | 10 ++--- .../q37/explain.txt | 10 ++--- .../q38/explain.txt | 12 +++--- .../q39a/explain.txt | 16 ++++---- .../q39b/explain.txt | 16 ++++---- .../q4/explain.txt | 32 +++++++-------- .../q40/explain.txt | 12 +++--- .../q41/explain.txt | 4 +- .../q42/explain.txt | 6 +-- .../q43/explain.txt | 6 +-- .../q44/explain.txt | 6 +-- .../q45/explain.txt | 14 +++---- .../q46/explain.txt | 16 ++++---- .../q47/explain.txt | 10 ++--- .../q48/explain.txt | 12 +++--- .../q49/explain.txt | 16 ++++---- .../q5/explain.txt | 24 +++++------ .../q50/explain.txt | 12 +++--- .../q51/explain.txt | 8 ++-- .../q52/explain.txt | 6 +-- .../q53/explain.txt | 10 ++--- .../q54/explain.txt | 26 ++++++------ .../q55/explain.txt | 6 +-- .../q56/explain.txt | 16 ++++---- .../q57/explain.txt | 10 ++--- .../q58/explain.txt | 16 ++++---- .../q59/explain.txt | 12 +++--- .../q6/explain.txt | 16 ++++---- .../q60/explain.txt | 16 ++++---- .../q61/explain.txt | 18 ++++----- .../q62/explain.txt | 10 ++--- .../q63/explain.txt | 10 ++--- .../q64/explain.txt | 34 ++++++++-------- .../q65/explain.txt | 12 +++--- .../q66/explain.txt | 14 +++---- .../q67/explain.txt | 10 ++--- .../q68/explain.txt | 16 ++++---- .../q69/explain.txt | 16 ++++---- .../q7/explain.txt | 12 +++--- .../q70/explain.txt | 12 +++--- .../q71/explain.txt | 14 +++---- .../q72/explain.txt | 24 +++++------ .../q73/explain.txt | 12 +++--- .../q74/explain.txt | 24 +++++------ .../q75/explain.txt | 28 ++++++------- .../q76/explain.txt | 10 ++--- .../q77/explain.txt | 20 +++++----- .../q78/explain.txt | 16 ++++---- .../q79/explain.txt | 12 +++--- .../q8/explain.txt | 14 +++---- .../q80/explain.txt | 26 ++++++------ .../q81/explain.txt | 14 +++---- .../q82/explain.txt | 10 ++--- .../q83.ansi/explain.txt | 16 ++++---- .../q84/explain.txt | 12 +++--- .../q85/explain.txt | 18 ++++----- .../q86/explain.txt | 8 ++-- .../q87/explain.txt | 12 +++--- .../q88/explain.txt | 36 ++++++++--------- .../q89/explain.txt | 10 ++--- .../q9/explain.txt | 12 +++--- .../q90/explain.txt | 12 +++--- .../q91/explain.txt | 16 ++++---- .../q92/explain.txt | 10 ++--- .../q93/explain.txt | 6 +-- .../q94/explain.txt | 12 +++--- .../q95/explain.txt | 12 +++--- .../q96/explain.txt | 8 ++-- .../q97/explain.txt | 8 ++-- .../q98/explain.txt | 8 ++-- .../q99/explain.txt | 10 ++--- .../approved-plans-v1_4/q1/explain.txt | 12 +++--- 
.../approved-plans-v1_4/q10/explain.txt | 16 ++++---- .../approved-plans-v1_4/q11/explain.txt | 24 +++++------ .../approved-plans-v1_4/q12/explain.txt | 8 ++-- .../approved-plans-v1_4/q13/explain.txt | 14 +++---- .../approved-plans-v1_4/q14a/explain.txt | 34 ++++++++-------- .../approved-plans-v1_4/q14b/explain.txt | 40 +++++++++---------- .../approved-plans-v1_4/q15/explain.txt | 10 ++--- .../approved-plans-v1_4/q16/explain.txt | 12 +++--- .../approved-plans-v1_4/q17/explain.txt | 18 ++++----- .../approved-plans-v1_4/q18/explain.txt | 16 ++++---- .../approved-plans-v1_4/q19/explain.txt | 12 +++--- .../approved-plans-v1_4/q2/explain.txt | 10 ++--- .../approved-plans-v1_4/q20/explain.txt | 8 ++-- .../approved-plans-v1_4/q21/explain.txt | 10 ++--- .../approved-plans-v1_4/q22/explain.txt | 10 ++--- .../approved-plans-v1_4/q23a/explain.txt | 26 ++++++------ .../approved-plans-v1_4/q23b/explain.txt | 28 ++++++------- .../approved-plans-v1_4/q24a/explain.txt | 14 +++---- .../approved-plans-v1_4/q24b/explain.txt | 14 +++---- .../approved-plans-v1_4/q25/explain.txt | 18 ++++----- .../approved-plans-v1_4/q26/explain.txt | 12 +++--- .../approved-plans-v1_4/q27/explain.txt | 12 +++--- .../approved-plans-v1_4/q28/explain.txt | 12 +++--- .../approved-plans-v1_4/q29/explain.txt | 22 +++++----- .../approved-plans-v1_4/q3/explain.txt | 6 +-- .../approved-plans-v1_4/q30/explain.txt | 14 +++---- .../approved-plans-v1_4/q31/explain.txt | 26 ++++++------ .../approved-plans-v1_4/q32/explain.txt | 10 ++--- .../approved-plans-v1_4/q33/explain.txt | 16 ++++---- .../approved-plans-v1_4/q34/explain.txt | 12 +++--- .../approved-plans-v1_4/q35/explain.txt | 16 ++++---- .../approved-plans-v1_4/q36/explain.txt | 10 ++--- .../approved-plans-v1_4/q37/explain.txt | 10 ++--- .../approved-plans-v1_4/q38/explain.txt | 12 +++--- .../approved-plans-v1_4/q39a/explain.txt | 16 ++++---- .../approved-plans-v1_4/q39b/explain.txt | 16 ++++---- .../approved-plans-v1_4/q4/explain.txt | 32 +++++++-------- .../approved-plans-v1_4/q40/explain.txt | 12 +++--- .../approved-plans-v1_4/q41/explain.txt | 4 +- .../approved-plans-v1_4/q42/explain.txt | 6 +-- .../approved-plans-v1_4/q43/explain.txt | 6 +-- .../approved-plans-v1_4/q44/explain.txt | 6 +-- .../approved-plans-v1_4/q45/explain.txt | 14 +++---- .../approved-plans-v1_4/q46/explain.txt | 14 +++---- .../approved-plans-v1_4/q47/explain.txt | 10 ++--- .../approved-plans-v1_4/q48/explain.txt | 12 +++--- .../approved-plans-v1_4/q49/explain.txt | 16 ++++---- .../approved-plans-v1_4/q5/explain.txt | 24 +++++------ .../approved-plans-v1_4/q50/explain.txt | 12 +++--- .../approved-plans-v1_4/q51/explain.txt | 8 ++-- .../approved-plans-v1_4/q52/explain.txt | 6 +-- .../approved-plans-v1_4/q53/explain.txt | 10 ++--- .../approved-plans-v1_4/q54/explain.txt | 26 ++++++------ .../approved-plans-v1_4/q55/explain.txt | 6 +-- .../approved-plans-v1_4/q56/explain.txt | 16 ++++---- .../approved-plans-v1_4/q57/explain.txt | 10 ++--- .../approved-plans-v1_4/q58/explain.txt | 16 ++++---- .../approved-plans-v1_4/q59/explain.txt | 12 +++--- .../approved-plans-v1_4/q6/explain.txt | 16 ++++---- .../approved-plans-v1_4/q60/explain.txt | 16 ++++---- .../approved-plans-v1_4/q61/explain.txt | 18 ++++----- .../approved-plans-v1_4/q62/explain.txt | 10 ++--- .../approved-plans-v1_4/q63/explain.txt | 10 ++--- .../approved-plans-v1_4/q64/explain.txt | 38 +++++++++--------- .../approved-plans-v1_4/q65/explain.txt | 12 +++--- .../approved-plans-v1_4/q66/explain.txt | 14 +++---- .../approved-plans-v1_4/q67/explain.txt | 10 
++--- .../approved-plans-v1_4/q68/explain.txt | 14 +++---- .../approved-plans-v1_4/q69/explain.txt | 16 ++++---- .../approved-plans-v1_4/q7/explain.txt | 12 +++--- .../approved-plans-v1_4/q70/explain.txt | 12 +++--- .../approved-plans-v1_4/q71/explain.txt | 14 +++---- .../approved-plans-v1_4/q72/explain.txt | 24 +++++------ .../approved-plans-v1_4/q73/explain.txt | 12 +++--- .../approved-plans-v1_4/q74/explain.txt | 24 +++++------ .../approved-plans-v1_4/q75/explain.txt | 28 ++++++------- .../approved-plans-v1_4/q76/explain.txt | 10 ++--- .../approved-plans-v1_4/q77/explain.txt | 20 +++++----- .../approved-plans-v1_4/q78/explain.txt | 16 ++++---- .../approved-plans-v1_4/q79/explain.txt | 12 +++--- .../approved-plans-v1_4/q8/explain.txt | 14 +++---- .../approved-plans-v1_4/q80/explain.txt | 26 ++++++------ .../approved-plans-v1_4/q81/explain.txt | 14 +++---- .../approved-plans-v1_4/q82/explain.txt | 10 ++--- .../approved-plans-v1_4/q83/explain.txt | 16 ++++---- .../approved-plans-v1_4/q84/explain.txt | 12 +++--- .../approved-plans-v1_4/q85/explain.txt | 18 ++++----- .../approved-plans-v1_4/q86/explain.txt | 8 ++-- .../approved-plans-v1_4/q87/explain.txt | 12 +++--- .../approved-plans-v1_4/q88/explain.txt | 36 ++++++++--------- .../approved-plans-v1_4/q89/explain.txt | 10 ++--- .../approved-plans-v1_4/q9/explain.txt | 12 +++--- .../approved-plans-v1_4/q90/explain.txt | 12 +++--- .../approved-plans-v1_4/q91/explain.txt | 16 ++++---- .../approved-plans-v1_4/q92/explain.txt | 10 ++--- .../approved-plans-v1_4/q93/explain.txt | 6 +-- .../approved-plans-v1_4/q94/explain.txt | 12 +++--- .../approved-plans-v1_4/q95/explain.txt | 12 +++--- .../approved-plans-v1_4/q96/explain.txt | 8 ++-- .../approved-plans-v1_4/q97/explain.txt | 8 ++-- .../approved-plans-v1_4/q98/explain.txt | 8 ++-- .../approved-plans-v1_4/q99/explain.txt | 10 ++--- .../q10a/explain.txt | 16 ++++---- .../q11/explain.txt | 24 +++++------ .../q12/explain.txt | 8 ++-- .../q14/explain.txt | 40 +++++++++---------- .../q14a/explain.txt | 38 +++++++++--------- .../q18a/explain.txt | 32 +++++++-------- .../q20/explain.txt | 8 ++-- .../q22/explain.txt | 10 ++--- .../q22a/explain.txt | 10 ++--- .../q24/explain.txt | 14 +++---- .../q27a/explain.txt | 20 +++++----- .../q34/explain.txt | 12 +++--- .../q35/explain.txt | 16 ++++---- .../q35a/explain.txt | 16 ++++---- .../q36a/explain.txt | 10 ++--- .../q47/explain.txt | 10 ++--- .../q49/explain.txt | 16 ++++---- .../q51a/explain.txt | 8 ++-- .../q57/explain.txt | 10 ++--- .../q5a/explain.txt | 24 +++++------ .../q6/explain.txt | 16 ++++---- .../q64/explain.txt | 38 +++++++++--------- .../q67a/explain.txt | 10 ++--- .../q70a/explain.txt | 12 +++--- .../q72/explain.txt | 24 +++++------ .../q74/explain.txt | 24 +++++------ .../q75/explain.txt | 28 ++++++------- .../q77a/explain.txt | 20 +++++----- .../q78/explain.txt | 16 ++++---- .../q80a/explain.txt | 26 ++++++------ .../q86a/explain.txt | 8 ++-- .../q98/explain.txt | 8 ++-- .../q10a/explain.txt | 16 ++++---- .../q11/explain.txt | 24 +++++------ .../q12/explain.txt | 8 ++-- .../q14/explain.txt | 40 +++++++++---------- .../q14a/explain.txt | 38 +++++++++--------- .../q18a/explain.txt | 32 +++++++-------- .../q20/explain.txt | 8 ++-- .../q22/explain.txt | 10 ++--- .../q22a/explain.txt | 10 ++--- .../q24/explain.txt | 14 +++---- .../q27a/explain.txt | 20 +++++----- .../q34/explain.txt | 12 +++--- .../q35/explain.txt | 16 ++++---- .../q35a/explain.txt | 16 ++++---- .../q36a/explain.txt | 10 ++--- .../q47/explain.txt | 10 ++--- .../q49/explain.txt | 16 
++++---- .../q51a/explain.txt | 8 ++-- .../q57/explain.txt | 10 ++--- .../q5a/explain.txt | 24 +++++------ .../q6/explain.txt | 16 ++++---- .../q64/explain.txt | 34 ++++++++-------- .../q67a/explain.txt | 10 ++--- .../q70a/explain.txt | 12 +++--- .../q72/explain.txt | 24 +++++------ .../q74/explain.txt | 24 +++++------ .../q75/explain.txt | 28 ++++++------- .../q77a/explain.txt | 20 +++++----- .../q78/explain.txt | 16 ++++---- .../q80a/explain.txt | 26 ++++++------ .../q86a/explain.txt | 8 ++-- .../q98/explain.txt | 8 ++-- .../approved-plans-v2_7/q10a/explain.txt | 16 ++++---- .../approved-plans-v2_7/q11/explain.txt | 24 +++++------ .../approved-plans-v2_7/q12/explain.txt | 8 ++-- .../approved-plans-v2_7/q14/explain.txt | 40 +++++++++---------- .../approved-plans-v2_7/q14a/explain.txt | 38 +++++++++--------- .../approved-plans-v2_7/q18a/explain.txt | 32 +++++++-------- .../approved-plans-v2_7/q20/explain.txt | 8 ++-- .../approved-plans-v2_7/q22/explain.txt | 10 ++--- .../approved-plans-v2_7/q22a/explain.txt | 10 ++--- .../approved-plans-v2_7/q24/explain.txt | 14 +++---- .../approved-plans-v2_7/q27a/explain.txt | 20 +++++----- .../approved-plans-v2_7/q34/explain.txt | 12 +++--- .../approved-plans-v2_7/q35/explain.txt | 16 ++++---- .../approved-plans-v2_7/q35a/explain.txt | 16 ++++---- .../approved-plans-v2_7/q36a/explain.txt | 10 ++--- .../approved-plans-v2_7/q47/explain.txt | 10 ++--- .../approved-plans-v2_7/q49/explain.txt | 16 ++++---- .../approved-plans-v2_7/q51a/explain.txt | 8 ++-- .../approved-plans-v2_7/q57/explain.txt | 10 ++--- .../approved-plans-v2_7/q5a/explain.txt | 24 +++++------ .../approved-plans-v2_7/q6/explain.txt | 16 ++++---- .../approved-plans-v2_7/q64/explain.txt | 38 +++++++++--------- .../approved-plans-v2_7/q67a/explain.txt | 10 ++--- .../approved-plans-v2_7/q70a/explain.txt | 12 +++--- .../approved-plans-v2_7/q72/explain.txt | 24 +++++------ .../approved-plans-v2_7/q74/explain.txt | 24 +++++------ .../approved-plans-v2_7/q75/explain.txt | 28 ++++++------- .../approved-plans-v2_7/q77a/explain.txt | 20 +++++----- .../approved-plans-v2_7/q78/explain.txt | 16 ++++---- .../approved-plans-v2_7/q80a/explain.txt | 26 ++++++------ .../approved-plans-v2_7/q86a/explain.txt | 8 ++-- .../approved-plans-v2_7/q98/explain.txt | 8 ++--
 406 files changed, 3131 insertions(+), 3109 deletions(-)

diff --git a/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala b/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala
index 2cbe8961a..49f7694bc 100644
--- a/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala
+++ b/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala
@@ -141,8 +141,30 @@ case class CometScanExec(
     if (wrapped == null) Map.empty else wrapped.metadata
 
   override def verboseStringWithOperatorId(): String = {
-    getTagValue(QueryPlan.OP_ID_TAG).foreach(id => wrapped.setTagValue(QueryPlan.OP_ID_TAG, id))
-    wrapped.verboseStringWithOperatorId()
+    val metadataStr = metadata.toSeq.sorted
+      .filterNot {
+        case (_, value) if (value.isEmpty || value.equals("[]")) => true
+        case (key, _) if (key.equals("DataFilters") || key.equals("Format")) => true
+        case (_, _) => false
+      }
+      .map {
+        case (key, _) if (key.equals("Location")) =>
+          val location = relation.location
+          val numPaths = location.rootPaths.length
+          val abbreviatedLocation = if (numPaths <= 1) {
+            location.rootPaths.mkString("[", ", ", "]")
+          } else {
+            "[" + location.rootPaths.head + s", ... ${numPaths - 1} entries]"
+          }
+          s"$key: ${location.getClass.getSimpleName} ${redact(abbreviatedLocation)}"
+        case (key, value) => s"$key: ${redact(value)}"
+      }
+
+    s"""
+       |$formattedNodeName
+       |${ExplainUtils.generateFieldString("Output", output)}
+       |${metadataStr.mkString("\n")}
+       |""".stripMargin
   }
 
   lazy val inputRDD: RDD[InternalRow] = {
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q1/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q1/explain.txt index 707707a74..ccca28151 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q1/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q1/explain.txt @@ -41,7 +41,7 @@ +- CometScan parquet spark_catalog.default.customer (34) -(1) Scan parquet spark_catalog.default.store_returns +(1) CometScan parquet spark_catalog.default.store_returns Output [4]: [sr_customer_sk#1, sr_store_sk#2, sr_return_amt#3, sr_returned_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -53,7 +53,7 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -104,7 +104,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -136,7 +136,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -176,7 +176,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_county#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -208,7 +208,7 @@ Join condition: None Output [1]: [c_current_cdemo_sk#4] Input [3]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20] -(36) Scan parquet spark_catalog.default.customer_demographics +(36) CometScan parquet spark_catalog.default.customer_demographics Output [9]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_education_status#25, cd_purchase_estimate#26, cd_credit_rating#27, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -268,7 +268,7 @@ BroadcastExchange (50) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q11/explain.txt
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q11/explain.txt index 0346efa54..609ab39c8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q11/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q11/explain.txt @@ -70,7 +70,7 @@ +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -81,7 +81,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#19, c_customer_id#20, c_first_name#21, c_last_name#22, c_preferred_cust_flag#23, c_birth_country#24, c_login#25, c_email_address#26] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -159,7 +159,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q13/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q13/explain.txt index 7d035666a..e3c357b40 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q13/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q13/explain.txt @@ -34,7 +34,7 @@ +- CometScan parquet spark_catalog.default.household_demographics (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [10]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10] Batched: true Location: InMemoryFileIndex [] @@ -46,7 +46,7 @@ ReadSchema: struct= 100.00) AND (ss_net_profit#9 <= 200.00)) OR ((ss_net_profit#9 >= 150.00) AND (ss_net_profit#9 <= 300.00))) OR ((ss_net_profit#9 >= 50.00) AND 
(ss_net_profit#9 <= 250.00)))) AND ((((ss_sales_price#6 >= 100.00) AND (ss_sales_price#6 <= 150.00)) OR ((ss_sales_price#6 >= 50.00) AND (ss_sales_price#6 <= 100.00))) OR ((ss_sales_price#6 >= 150.00) AND (ss_sales_price#6 <= 200.00)))) -(3) Scan parquet spark_catalog.default.store +(3) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -70,7 +70,7 @@ Arguments: [ss_store_sk#4], [s_store_sk#12], Inner, BuildRight Input [11]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10, s_store_sk#12] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -98,7 +98,7 @@ Arguments: [ss_addr_sk#3], [ca_address_sk#13], Inner, ((((ca_state#14 IN (TX,OH) Input [11]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10, ca_address_sk#13, ca_state#14] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -126,7 +126,7 @@ Arguments: [ss_sold_date_sk#10], [d_date_sk#16], Inner, BuildRight Input [8]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10, d_date_sk#16] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8] -(20) Scan parquet spark_catalog.default.customer_demographics +(20) CometScan parquet spark_catalog.default.customer_demographics Output [3]: [cd_demo_sk#18, cd_marital_status#19, cd_education_status#20] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -150,7 +150,7 @@ Arguments: [ss_cdemo_sk#1], [cd_demo_sk#18], Inner, ((((((cd_marital_status#19 = Input [9]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_demo_sk#18, cd_marital_status#19, cd_education_status#20] Arguments: [ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_marital_status#19, cd_education_status#20], [ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_marital_status#19, cd_education_status#20] -(25) Scan parquet spark_catalog.default.household_demographics +(25) CometScan 
parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#21, hd_dep_count#22] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -201,7 +201,7 @@ BroadcastExchange (38) +- CometScan parquet spark_catalog.default.date_dim (34) -(34) Scan parquet spark_catalog.default.date_dim +(34) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14a/explain.txt index c758aeb6f..bd6aa9273 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14a/explain.txt @@ -103,7 +103,7 @@ +- ReusedExchange (88) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -115,7 +115,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -126,7 +126,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -149,7 +149,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -185,7 +185,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -261,7 +261,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -328,7 +328,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: 
[i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -360,7 +360,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -410,7 +410,7 @@ Condition : (isnotnull(sales#49) AND (cast(sales#49 as decimal(32,6)) > cast(Sub Input [5]: [i_brand_id#40, i_class_id#41, i_category_id#42, sales#49, number_sales#50] Arguments: [sales#49, number_sales#50, channel#53, i_brand_id#54, i_class_id#55, i_category_id#56], [sales#49, number_sales#50, store AS channel#53, i_brand_id#40 AS i_brand_id#54, i_class_id#41 AS i_class_id#55, i_category_id#42 AS i_category_id#56] -(66) Scan parquet spark_catalog.default.catalog_sales +(66) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#57, cs_quantity#58, cs_list_price#59, cs_sold_date_sk#60] Batched: true Location: InMemoryFileIndex [] @@ -476,7 +476,7 @@ Condition : (isnotnull(sales#71) AND (cast(sales#71 as decimal(32,6)) > cast(Reu Input [5]: [i_brand_id#64, i_class_id#65, i_category_id#66, sales#71, number_sales#72] Arguments: [sales#71, number_sales#72, channel#73, i_brand_id#64, i_class_id#65, i_category_id#66], [sales#71, number_sales#72, catalog AS channel#73, i_brand_id#64, i_class_id#65, i_category_id#66] -(81) Scan parquet spark_catalog.default.web_sales +(81) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#74, ws_quantity#75, ws_list_price#76, ws_sold_date_sk#77] Batched: true Location: InMemoryFileIndex [] @@ -594,7 +594,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (112) -(103) Scan parquet spark_catalog.default.store_sales +(103) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#101, ss_list_price#102, ss_sold_date_sk#103] Batched: true Location: InMemoryFileIndex [] @@ -613,7 +613,7 @@ Arguments: [ss_sold_date_sk#103], [d_date_sk#105], Inner, BuildRight Input [4]: [ss_quantity#101, ss_list_price#102, ss_sold_date_sk#103, d_date_sk#105] Arguments: [quantity#106, list_price#107], [ss_quantity#101 AS quantity#106, ss_list_price#102 AS list_price#107] -(107) Scan parquet spark_catalog.default.catalog_sales +(107) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#108, cs_list_price#109, cs_sold_date_sk#110] Batched: true Location: InMemoryFileIndex [] @@ -632,7 +632,7 @@ Arguments: [cs_sold_date_sk#110], [d_date_sk#112], Inner, BuildRight Input [4]: [cs_quantity#108, cs_list_price#109, cs_sold_date_sk#110, d_date_sk#112] Arguments: [quantity#113, list_price#114], [cs_quantity#108 AS quantity#113, cs_list_price#109 AS list_price#114] -(111) Scan parquet spark_catalog.default.web_sales +(111) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#115, ws_list_price#116, ws_sold_date_sk#117] Batched: true Location: InMemoryFileIndex [] @@ -687,7 +687,7 @@ BroadcastExchange (124) +- CometScan parquet 
spark_catalog.default.date_dim (120) -(120) Scan parquet spark_catalog.default.date_dim +(120) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -717,7 +717,7 @@ BroadcastExchange (129) +- CometScan parquet spark_catalog.default.date_dim (125) -(125) Scan parquet spark_catalog.default.date_dim +(125) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#125] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14b/explain.txt index 95ed68f76..4126fed5c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q14b/explain.txt @@ -86,7 +86,7 @@ +- CometScan parquet spark_catalog.default.date_dim (72) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -109,7 +109,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -132,7 +132,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -168,7 +168,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -244,7 +244,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -311,7 +311,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, 
BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -343,7 +343,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -389,7 +389,7 @@ Functions [2]: [sum((cast(ss_quantity#2 as decimal(10,0)) * ss_list_price#3)), c Input [6]: [channel#50, i_brand_id#40, i_class_id#41, i_category_id#42, sales#51, number_sales#52] Condition : (isnotnull(sales#51) AND (cast(sales#51 as decimal(32,6)) > cast(Subquery scalar-subquery#53, [id=#54] as decimal(32,6)))) -(65) Scan parquet spark_catalog.default.store_sales +(65) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -421,7 +421,7 @@ Arguments: [ss_item_sk#55], [i_item_sk#61], Inner, BuildRight Input [8]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_item_sk#61, i_brand_id#62, i_class_id#63, i_category_id#64] Arguments: [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#62, i_class_id#63, i_category_id#64], [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#62, i_class_id#63, i_category_id#64] -(72) Scan parquet spark_catalog.default.date_dim +(72) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#65, d_week_seq#66] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -505,7 +505,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (95) -(86) Scan parquet spark_catalog.default.store_sales +(86) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#75, ss_list_price#76, ss_sold_date_sk#77] Batched: true Location: InMemoryFileIndex [] @@ -524,7 +524,7 @@ Arguments: [ss_sold_date_sk#77], [d_date_sk#79], Inner, BuildRight Input [4]: [ss_quantity#75, ss_list_price#76, ss_sold_date_sk#77, d_date_sk#79] Arguments: [quantity#80, list_price#81], [ss_quantity#75 AS quantity#80, ss_list_price#76 AS list_price#81] -(90) Scan parquet spark_catalog.default.catalog_sales +(90) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#82, cs_list_price#83, cs_sold_date_sk#84] Batched: true Location: InMemoryFileIndex [] @@ -543,7 +543,7 @@ Arguments: [cs_sold_date_sk#84], [d_date_sk#86], Inner, BuildRight Input [4]: [cs_quantity#82, cs_list_price#83, cs_sold_date_sk#84, d_date_sk#86] Arguments: [quantity#87, list_price#88], [cs_quantity#82 AS quantity#87, cs_list_price#83 AS list_price#88] -(94) Scan parquet spark_catalog.default.web_sales +(94) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#89, ws_list_price#90, ws_sold_date_sk#91] Batched: true Location: InMemoryFileIndex [] @@ -598,7 +598,7 @@ BroadcastExchange (107) +- CometScan 
parquet spark_catalog.default.date_dim (103) -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -627,7 +627,7 @@ Subquery:6 Hosting operator id = 104 Hosting Expression = Subquery scalar-subque +- CometScan parquet spark_catalog.default.date_dim (108) -(108) Scan parquet spark_catalog.default.date_dim +(108) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#99, d_year#100, d_moy#101, d_dom#102] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -653,7 +653,7 @@ BroadcastExchange (116) +- CometScan parquet spark_catalog.default.date_dim (112) -(112) Scan parquet spark_catalog.default.date_dim +(112) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#103] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -691,7 +691,7 @@ BroadcastExchange (121) +- CometScan parquet spark_catalog.default.date_dim (117) -(117) Scan parquet spark_catalog.default.date_dim +(117) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#65, d_week_seq#66] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -720,7 +720,7 @@ Subquery:13 Hosting operator id = 118 Hosting Expression = Subquery scalar-subqu +- CometScan parquet spark_catalog.default.date_dim (122) -(122) Scan parquet spark_catalog.default.date_dim +(122) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#104, d_year#105, d_moy#106, d_dom#107] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q15/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q15/explain.txt index 842c3e576..037c52626 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q15/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q15/explain.txt @@ -24,7 +24,7 @@ +- CometScan parquet spark_catalog.default.date_dim (13) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -36,7 +36,7 @@ ReadSchema: struct Input [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_bill_customer_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#5, c_current_addr_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -60,7 +60,7 @@ Arguments: [cs_bill_customer_sk#1], [c_customer_sk#5], Inner, BuildRight Input [5]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3, c_customer_sk#5, c_current_addr_sk#6] Arguments: [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6], [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#7, ca_state#8, ca_zip#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -84,7 +84,7 @@ Arguments: [c_current_addr_sk#6], [ca_address_sk#7], Inner, ((substr(ca_zip#9, 1 Input [6]: 
[cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6, ca_address_sk#7, ca_state#8, ca_zip#9] Arguments: [cs_sales_price#2, cs_sold_date_sk#3, ca_zip#9], [cs_sales_price#2, cs_sold_date_sk#3, ca_zip#9] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#11, d_qoy#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -143,7 +143,7 @@ BroadcastExchange (28) +- CometScan parquet spark_catalog.default.date_dim (24) -(24) Scan parquet spark_catalog.default.date_dim +(24) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#11, d_qoy#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q16/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q16/explain.txt index 0f3c217d5..acd12b277 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q16/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q16/explain.txt @@ -41,7 +41,7 @@ +- CometScan parquet spark_catalog.default.call_center (29) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [8]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, cs_sold_date_sk#8] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -64,7 +64,7 @@ Arguments: hashpartitioning(cs_order_number#5, 5), ENSURE_REQUIREMENTS, CometNat Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] Arguments: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_order_number#5 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.catalog_sales +(6) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_warehouse_sk#9, cs_order_number#10, cs_sold_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -91,7 +91,7 @@ Arguments: [cs_order_number#5], [cs_order_number#10], LeftSemi, NOT (cs_warehous Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] Arguments: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(12) Scan parquet spark_catalog.default.catalog_returns +(12) CometScan parquet spark_catalog.default.catalog_returns Output [2]: [cr_order_number#12, cr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -114,7 +114,7 @@ Left output [6]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_ Right output [1]: [cr_order_number#12] Arguments: [cs_order_number#5], [cr_order_number#12], LeftAnti -(17) Scan parquet spark_catalog.default.date_dim +(17) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Arguments: [cs_ship_date_sk#1], 
[d_date_sk#14], Inner, BuildRight Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, d_date_sk#14] Arguments: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(23) Scan parquet spark_catalog.default.customer_address +(23) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#16, ca_state#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -170,7 +170,7 @@ Arguments: [cs_ship_addr_sk#2], [ca_address_sk#16], Inner, BuildRight Input [6]: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, ca_address_sk#16] Arguments: [cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(29) Scan parquet spark_catalog.default.call_center +(29) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#18, cc_county#19] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q17/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q17/explain.txt index 69f720e6f..496ec2f1a 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q17/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q17/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.item (33) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_quantity#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [2]: [ws_ext_sales_price#1, ws_sold_date_sk#2] Arguments: [sold_date_sk#3, sales_price#4], [ws_sold_date_sk#2 AS sold_date_sk#3, ws_ext_sales_price#1 AS sales_price#4] -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ext_sales_price#5, cs_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [sold_date_sk#7, sales_price#8], [cs_sold_date_sk#6 AS sold_date_sk#7 Child 0 Input [2]: [sold_date_sk#3, sales_price#4] Child 1 Input [2]: [sold_date_sk#7, sales_price#8] -(6) Scan parquet spark_catalog.default.date_dim +(6) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_week_seq#10, d_day_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -98,7 +98,7 @@ Input [8]: [d_week_seq#10, sum#12, sum#13, sum#14, sum#15, sum#16, sum#17, sum#1 Keys [1]: [d_week_seq#10] Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = 
Saturday ) THEN sales_price#4 END))] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_week_seq#19, d_year#20] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ Input [8]: [d_week_seq#36, sum#37, sum#38, sum#39, sum#40, sum#41, sum#42, sum#4 Keys [1]: [d_week_seq#36] Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#44 = Sunday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#44 = Monday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#44 = Tuesday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#44 = Wednesday) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#44 = Thursday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#44 = Friday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#44 = Saturday ) THEN sales_price#4 END))] -(22) Scan parquet spark_catalog.default.date_dim +(22) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_week_seq#45, d_year#46] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q20/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q20/explain.txt index 1af8f20f3..909acbf7f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q20/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q20/explain.txt @@ -23,7 +23,7 @@ TakeOrderedAndProject (22) +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -35,7 +35,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q21/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q21/explain.txt index e432933e5..1bcad35c3 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q21/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q21/explain.txt @@ -25,7 +25,7 @@ +- CometScan parquet spark_catalog.default.date_dim (14) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct -(2) Scan parquet spark_catalog.default.store_sales +(2) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -87,7 +87,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#7, ss_sold_date_sk#8] Condition : isnotnull(ss_item_sk#7) -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -115,7 +115,7 @@ Arguments: [ss_sold_date_sk#8], [d_date_sk#10], Inner, BuildRight Input [4]: [ss_item_sk#7, ss_sold_date_sk#8, d_date_sk#10, d_date#11] Arguments: [ss_item_sk#7, d_date#11], [ss_item_sk#7, d_date#11] -(10) Scan parquet spark_catalog.default.item +(10) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#13, i_item_desc#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -182,7 +182,7 @@ Arguments: hashpartitioning(cs_bill_customer_sk#1, 5), ENSURE_REQUIREMENTS, Come Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Arguments: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5], [cs_bill_customer_sk#1 ASC NULLS FIRST] -(25) Scan parquet spark_catalog.default.store_sales +(25) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -197,7 +197,7 @@ Condition : isnotnull(ss_customer_sk#19) Input [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Arguments: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21], [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21] -(28) Scan parquet spark_catalog.default.customer +(28) CometScan parquet spark_catalog.default.customer Output [1]: [c_customer_sk#23] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -256,7 +256,7 @@ Arguments: [cs_bill_customer_sk#1], [c_customer_sk#23], LeftSemi Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Arguments: [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5], [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] -(41) Scan parquet spark_catalog.default.date_dim +(41) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#29, d_year#30, d_moy#31] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -284,7 +284,7 @@ Arguments: [cs_sold_date_sk#5], [d_date_sk#29], Inner, BuildRight Input [4]: [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, d_date_sk#29] Arguments: [sales#32], [(cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4) AS sales#32] -(47) Scan parquet spark_catalog.default.web_sales 
+(47) CometScan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#33, ws_bill_customer_sk#34, ws_quantity#35, ws_list_price#36, ws_sold_date_sk#37] Batched: true Location: InMemoryFileIndex [] @@ -383,7 +383,7 @@ BroadcastExchange (72) +- CometScan parquet spark_catalog.default.date_dim (68) -(68) Scan parquet spark_catalog.default.date_dim +(68) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#29, d_year#30, d_moy#31] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -413,7 +413,7 @@ BroadcastExchange (77) +- CometScan parquet spark_catalog.default.date_dim (73) -(73) Scan parquet spark_catalog.default.date_dim +(73) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -456,7 +456,7 @@ Subquery:3 Hosting operator id = 36 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (83) -(78) Scan parquet spark_catalog.default.store_sales +(78) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#51, ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54] Batched: true Location: InMemoryFileIndex [] @@ -480,7 +480,7 @@ Arguments: [ss_customer_sk#51], [c_customer_sk#56], Inner, BuildRight Input [5]: [ss_customer_sk#51, ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54, c_customer_sk#56] Arguments: [ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54, c_customer_sk#56], [ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54, c_customer_sk#56] -(83) Scan parquet spark_catalog.default.date_dim +(83) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#57, d_year#58] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -547,7 +547,7 @@ BroadcastExchange (100) +- CometScan parquet spark_catalog.default.date_dim (96) -(96) Scan parquet spark_catalog.default.date_dim +(96) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#57, d_year#58] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q23b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q23b/explain.txt index 4de12b9b3..b9d5b25fd 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q23b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q23b/explain.txt @@ -88,7 +88,7 @@ +- ReusedExchange (79) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [5]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -100,7 +100,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#7, ss_sold_date_sk#8] Condition : isnotnull(ss_item_sk#7) -(5) Scan parquet spark_catalog.default.date_dim +(5) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -140,7 +140,7 @@ Arguments: [ss_sold_date_sk#8], [d_date_sk#10], Inner, BuildRight Input [4]: [ss_item_sk#7, ss_sold_date_sk#8, d_date_sk#10, d_date#11] Arguments: [ss_item_sk#7, d_date#11], [ss_item_sk#7, d_date#11] -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet 
spark_catalog.default.item Output [2]: [i_item_sk#13, i_item_desc#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -207,7 +207,7 @@ Arguments: hashpartitioning(cs_bill_customer_sk#1, 5), ENSURE_REQUIREMENTS, Come Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Arguments: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5], [cs_bill_customer_sk#1 ASC NULLS FIRST] -(26) Scan parquet spark_catalog.default.store_sales +(26) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -222,7 +222,7 @@ Condition : isnotnull(ss_customer_sk#19) Input [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Arguments: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21], [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21] -(29) Scan parquet spark_catalog.default.customer +(29) CometScan parquet spark_catalog.default.customer Output [1]: [c_customer_sk#23] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -277,7 +277,7 @@ Left output [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold Right output [1]: [c_customer_sk#23] Arguments: [cs_bill_customer_sk#1], [c_customer_sk#23], LeftSemi -(41) Scan parquet spark_catalog.default.customer +(41) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#29, c_first_name#30, c_last_name#31] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -334,7 +334,7 @@ Arguments: [cs_bill_customer_sk#1], [c_customer_sk#29], Inner, BuildRight Input [7]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, c_customer_sk#29, c_first_name#30, c_last_name#31] Arguments: [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, c_first_name#30, c_last_name#31], [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, c_first_name#30, c_last_name#31] -(54) Scan parquet spark_catalog.default.date_dim +(54) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#32, d_year#33, d_moy#34] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -376,7 +376,7 @@ Input [4]: [c_last_name#31, c_first_name#30, sum#35, isEmpty#36] Keys [2]: [c_last_name#31, c_first_name#30] Functions [1]: [sum((cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4))] -(63) Scan parquet spark_catalog.default.web_sales +(63) CometScan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#37, ws_bill_customer_sk#38, ws_quantity#39, ws_list_price#40, ws_sold_date_sk#41] Batched: true Location: InMemoryFileIndex [] @@ -492,7 +492,7 @@ BroadcastExchange (92) +- CometScan parquet spark_catalog.default.date_dim (88) -(88) Scan parquet spark_catalog.default.date_dim +(88) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#32, d_year#33, d_moy#34] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -522,7 +522,7 @@ BroadcastExchange (97) +- CometScan parquet spark_catalog.default.date_dim (93) -(93) Scan parquet spark_catalog.default.date_dim +(93) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -565,7 +565,7 @@ Subquery:3 Hosting operator id = 37 Hosting Expression = Subquery scalar-subquer +- 
CometScan parquet spark_catalog.default.date_dim (103) -(98) Scan parquet spark_catalog.default.store_sales +(98) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#58, ss_quantity#59, ss_sales_price#60, ss_sold_date_sk#61] Batched: true Location: InMemoryFileIndex [] @@ -589,7 +589,7 @@ Arguments: [ss_customer_sk#58], [c_customer_sk#63], Inner, BuildRight Input [5]: [ss_customer_sk#58, ss_quantity#59, ss_sales_price#60, ss_sold_date_sk#61, c_customer_sk#63] Arguments: [ss_quantity#59, ss_sales_price#60, ss_sold_date_sk#61, c_customer_sk#63], [ss_quantity#59, ss_sales_price#60, ss_sold_date_sk#61, c_customer_sk#63] -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#64, d_year#65] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -656,7 +656,7 @@ BroadcastExchange (120) +- CometScan parquet spark_catalog.default.date_dim (116) -(116) Scan parquet spark_catalog.default.date_dim +(116) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#64, d_year#65] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24a/explain.txt index 3094c4e07..9dbf4af83 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24a/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.customer_address (30) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -66,7 +66,7 @@ Arguments: hashpartitioning(ss_ticket_number#4, ss_item_sk#1, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5], [ss_ticket_number#4 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.store_returns +(6) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#7, sr_ticket_number#8, sr_returned_date_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -98,7 +98,7 @@ Arguments: [ss_ticket_number#4, ss_item_sk#1], [sr_ticket_number#8, sr_item_sk#7 Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, sr_item_sk#7, sr_ticket_number#8] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5], [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [5]: [s_store_sk#10, s_store_name#11, s_market_id#12, s_state#13, s_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -126,7 +126,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, 
s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -150,7 +150,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#15], Inner, BuildRight Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Arguments: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20], [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] -(24) Scan parquet spark_catalog.default.customer +(24) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#21, c_first_name#22, c_last_name#23, c_birth_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -177,7 +177,7 @@ Arguments: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_pric (29) ColumnarToRow [codegen id : 2] Input [12]: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20, c_first_name#22, c_last_name#23, c_birth_country#24] -(30) Scan parquet spark_catalog.default.customer_address +(30) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_state#25, ca_zip#26, ca_country#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -312,7 +312,7 @@ Arguments: [ss_store_sk#42], [s_store_sk#47], Inner, BuildRight Input [8]: [ss_item_sk#40, ss_customer_sk#41, ss_store_sk#42, ss_net_paid#44, s_store_sk#47, s_store_name#48, s_state#49, s_zip#50] Arguments: [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50], [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50] -(52) Scan parquet spark_catalog.default.item +(52) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#51, i_current_price#52, i_size#53, i_color#54, i_units#55, i_manager_id#56] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24b/explain.txt index 846c7a14a..f27ae4019 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q24b/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.customer_address (30) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -66,7 +66,7 @@ Arguments: hashpartitioning(ss_ticket_number#4, ss_item_sk#1, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, 
ss_ticket_number#4, ss_net_paid#5], [ss_ticket_number#4 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.store_returns +(6) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#7, sr_ticket_number#8, sr_returned_date_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -98,7 +98,7 @@ Arguments: [ss_ticket_number#4, ss_item_sk#1], [sr_ticket_number#8, sr_item_sk#7 Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, sr_item_sk#7, sr_ticket_number#8] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5], [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [5]: [s_store_sk#10, s_store_name#11, s_market_id#12, s_state#13, s_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -126,7 +126,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -150,7 +150,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#15], Inner, BuildRight Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Arguments: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20], [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] -(24) Scan parquet spark_catalog.default.customer +(24) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#21, c_first_name#22, c_last_name#23, c_birth_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -177,7 +177,7 @@ Arguments: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_pric (29) ColumnarToRow [codegen id : 2] Input [12]: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20, c_first_name#22, c_last_name#23, c_birth_country#24] -(30) Scan parquet spark_catalog.default.customer_address +(30) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_state#25, ca_zip#26, ca_country#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -312,7 +312,7 @@ Arguments: [ss_store_sk#42], [s_store_sk#47], Inner, BuildRight Input [8]: [ss_item_sk#40, ss_customer_sk#41, ss_store_sk#42, ss_net_paid#44, s_store_sk#47, s_store_name#48, s_state#49, s_zip#50] Arguments: [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50], [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50] -(52) Scan parquet spark_catalog.default.item +(52) CometScan 
parquet spark_catalog.default.item Output [6]: [i_item_sk#51, i_current_price#52, i_size#53, i_color#54, i_units#55, i_manager_id#56] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q25/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q25/explain.txt index 0d47bdcf1..6c7833548 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q25/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q25/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.item (33) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_profit#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_addr_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -127,7 +127,7 @@ Arguments: [ss_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [6]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, d_date_sk#5, d_year#6, d_qoy#7] Arguments: [ss_addr_sk#1, ss_ext_sales_price#2, d_year#6, d_qoy#7], [ss_addr_sk#1, ss_ext_sales_price#2, d_year#6, d_qoy#7] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#8, ca_county#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -165,7 +165,7 @@ Input [4]: [ca_county#9, d_qoy#7, d_year#6, sum#10] Keys [3]: [ca_county#9, d_qoy#7, d_year#6] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#2))] -(16) Scan parquet spark_catalog.default.store_sales +(16) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#11, ss_ext_sales_price#12, ss_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -177,7 +177,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#11, ss_ext_sales_price#12, ss_sold_date_sk#13] Condition : isnotnull(ss_addr_sk#11) -(18) Scan parquet spark_catalog.default.date_dim +(18) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_year#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -236,7 +236,7 @@ Left output [3]: [ca_county#9, d_year#6, store_sales#22] Right output [2]: [ca_county#19, store_sales#21] Arguments: [ca_county#9], [ca_county#19], Inner, BuildRight -(31) Scan parquet spark_catalog.default.store_sales +(31) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#23, ss_ext_sales_price#24, ss_sold_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -248,7 +248,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#23, ss_ext_sales_price#24, ss_sold_date_sk#25] Condition : isnotnull(ss_addr_sk#23) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#27, d_year#28, d_qoy#29] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -311,7 +311,7 @@ Arguments: [ca_county#19], [ca_county#31], Inner, BuildRight Input [7]: 
[ca_county#9, d_year#6, store_sales#22, ca_county#19, store_sales#21, ca_county#31, store_sales#33] Arguments: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33], [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33] -(47) Scan parquet spark_catalog.default.web_sales +(47) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#34, ws_ext_sales_price#35, ws_sold_date_sk#36] Batched: true Location: InMemoryFileIndex [] @@ -370,7 +370,7 @@ Left output [5]: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_s Right output [2]: [ca_county#42, web_sales#44] Arguments: [ca_county#9], [ca_county#42], Inner, BuildRight -(60) Scan parquet spark_catalog.default.web_sales +(60) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#45, ws_ext_sales_price#46, ws_sold_date_sk#47] Batched: true Location: InMemoryFileIndex [] @@ -433,7 +433,7 @@ Arguments: [ca_county#42], [ca_county#53], Inner, (CASE WHEN (web_sales#44 > 0.0 Input [9]: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33, ca_county#42, web_sales#44, ca_county#53, web_sales#55] Arguments: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33, ca_county#42, web_sales#44, web_sales#55], [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33, ca_county#42, web_sales#44, web_sales#55] -(74) Scan parquet spark_catalog.default.web_sales +(74) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#56, ws_ext_sales_price#57, ws_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -516,7 +516,7 @@ BroadcastExchange (94) +- CometScan parquet spark_catalog.default.date_dim (91) -(91) Scan parquet spark_catalog.default.date_dim +(91) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -541,7 +541,7 @@ BroadcastExchange (98) +- CometScan parquet spark_catalog.default.date_dim (95) -(95) Scan parquet spark_catalog.default.date_dim +(95) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_year#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -566,7 +566,7 @@ BroadcastExchange (102) +- CometScan parquet spark_catalog.default.date_dim (99) -(99) Scan parquet spark_catalog.default.date_dim +(99) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#27, d_year#28, d_qoy#29] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q32/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q32/explain.txt index a0b604678..03edc9859 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q32/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q32/explain.txt @@ -31,7 +31,7 @@ +- ReusedExchange (24) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -43,7 +43,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] Condition : (isnotnull(cs_item_sk#1) AND isnotnull(cs_ext_discount_amt#2)) -(3) Scan parquet spark_catalog.default.item +(3) CometScan 
parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_manufact_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -71,7 +71,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [4]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5] Arguments: [cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5], [cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5] -(9) Scan parquet spark_catalog.default.catalog_sales +(9) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#7, cs_ext_discount_amt#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -83,7 +83,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#7, cs_ext_discount_amt#8, cs_sold_date_sk#9] Condition : isnotnull(cs_item_sk#7) -(11) Scan parquet spark_catalog.default.date_dim +(11) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -181,7 +181,7 @@ BroadcastExchange (35) +- CometScan parquet spark_catalog.default.date_dim (31) -(31) Scan parquet spark_catalog.default.date_dim +(31) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_date#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q33/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q33/explain.txt index a432ecd29..7a50fe69e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q33/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q33/explain.txt @@ -62,7 +62,7 @@ +- ReusedExchange (50) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -74,7 +74,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_manufact_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_category#13, i_manufact_id#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -192,7 +192,7 @@ Input [2]: [i_manufact_id#12, sum#15] Keys [1]: [i_manufact_id#12] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] -(28) Scan parquet spark_catalog.default.catalog_sales +(28) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#16, cs_item_sk#17, cs_ext_sales_price#18, cs_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -254,7 +254,7 @@ Input [2]: [i_manufact_id#24, sum#25] Keys [1]: [i_manufact_id#24] Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#18))] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#26, ws_bill_addr_sk#27, ws_ext_sales_price#28, ws_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -352,7 +352,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q34/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q34/explain.txt index 7cfddd6a7..3223f7c72 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q34/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q34/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -45,7 +45,7 @@ ReadSchema: struct= 15) AND (cnt#17 <= 20)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q35/explain.txt index c06c1dd16..c1e19555c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q35/explain.txt @@ -45,7 +45,7 @@ TakeOrderedAndProject (44) +- CometScan parquet spark_catalog.default.customer_demographics (35) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -56,14 +56,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -175,7 +175,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address 
Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#21] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20, ca_state#21] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q36/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q36/explain.txt index 68397f3a2..5bd9122ff 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q36/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q36/explain.txt @@ -30,7 +30,7 @@ TakeOrderedAndProject (29) +- CometScan parquet spark_catalog.default.store (14) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -42,7 +42,7 @@ ReadSchema: struct= 68.00)) A Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(4) Scan parquet spark_catalog.default.inventory +(4) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -71,7 +71,7 @@ Arguments: [i_item_sk#1], [inv_item_sk#6], Inner, BuildRight Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item_sk#6, inv_date_sk#8] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(17) Scan parquet spark_catalog.default.catalog_sales +(17) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_item_sk#12, cs_sold_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -158,7 +158,7 @@ BroadcastExchange (31) +- CometScan parquet spark_catalog.default.date_dim (27) -(27) Scan parquet spark_catalog.default.date_dim +(27) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] 
Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q38/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q38/explain.txt index 598dccaf0..c0e7300df 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q38/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q38/explain.txt @@ -48,7 +48,7 @@ +- ReusedExchange (35) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ ReadSchema: struct Input [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Condition : isnotnull(ss_customer_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -88,7 +88,7 @@ Arguments: [ss_sold_date_sk#2], [d_date_sk#4], Inner, BuildRight Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5] Arguments: [ss_customer_sk#1, d_date#5], [ss_customer_sk#1, d_date#5] -(9) Scan parquet spark_catalog.default.customer +(9) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -126,7 +126,7 @@ Input [3]: [c_last_name#9, c_first_name#8, d_date#5] Keys [3]: [c_last_name#9, c_first_name#8, d_date#5] Functions: [] -(17) Scan parquet spark_catalog.default.catalog_sales +(17) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_bill_customer_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -185,7 +185,7 @@ Left output [3]: [c_last_name#9, c_first_name#8, d_date#5] Right output [3]: [c_last_name#17, c_first_name#16, d_date#14] Arguments: [coalesce(c_last_name#9, ), isnull(c_last_name#9), coalesce(c_first_name#8, ), isnull(c_first_name#8), coalesce(d_date#5, 1970-01-01), isnull(d_date#5)], [coalesce(c_last_name#17, ), isnull(c_last_name#17), coalesce(c_first_name#16, ), isnull(c_first_name#16), coalesce(d_date#14, 1970-01-01), isnull(d_date#14)], LeftSemi, BuildRight -(30) Scan parquet spark_catalog.default.web_sales +(30) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#18, ws_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -274,7 +274,7 @@ BroadcastExchange (52) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q39a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q39a/explain.txt index 96e213fff..ef9e118a8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q39a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q39a/explain.txt @@ -48,7 +48,7 @@ +- CometScan parquet spark_catalog.default.date_dim (32) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet 
spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ ReadSchema: struct 0.000000)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#22, c_customer_id#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26, c_birth_country#27, c_login#28, c_email_address#29] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -191,7 +191,7 @@ ReadSchema: struct= 738)) AND (i_m Input [3]: [i_manufact_id#1, i_manufact#2, i_product_name#3] Arguments: [i_manufact#2, i_product_name#3], [i_manufact#2, i_product_name#3] -(4) Scan parquet spark_catalog.default.item +(4) CometScan parquet spark_catalog.default.item Output [5]: [i_category#4, i_manufact#5, i_size#6, i_color#7, i_units#8] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q42/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q42/explain.txt index c2c2c8bf3..b58d82c78 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q42/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q42/explain.txt @@ -20,7 +20,7 @@ +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1, d_year#2], [d_date_sk#1, d_year#2] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_year#2, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [d_year#2, ss_item_sk#4, ss_ext_sales_price#5], [d_year#2, ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_category_id#8, i_category#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q43/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q43/explain.txt index d6cb50a4f..2953a5f8f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q43/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q43/explain.txt @@ -20,7 +20,7 @@ +- CometScan parquet spark_catalog.default.store (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_day_name#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((isnotnull(d_year#2) AND (d_year#2 = 2000)) AND isnotnull(d_date_sk Input [3]: [d_date_sk#1, d_year#2, d_day_name#3] Arguments: 
[d_date_sk#1, d_day_name#3], [d_date_sk#1, d_day_name#3] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_day_name#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Arguments: [d_day_name#3, ss_store_sk#4, ss_sales_price#5], [d_day_name#3, ss_store_sk#4, ss_sales_price#5] -(9) Scan parquet spark_catalog.default.store +(9) CometScan parquet spark_catalog.default.store Output [4]: [s_store_sk#7, s_store_id#8, s_store_name#9, s_gmt_offset#10] Batched: true Location [not included in comparison]/{warehouse_dir}/store] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q44/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q44/explain.txt index 57e6b0e89..337a09591 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q44/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q44/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_store_sk#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -176,7 +176,7 @@ Join condition: None Output [3]: [item_sk#7, rnk#11, item_sk#16] Input [4]: [item_sk#7, rnk#11, item_sk#16, rnk#18] -(33) Scan parquet spark_catalog.default.item +(33) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#19, i_product_name#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -233,7 +233,7 @@ Subquery:1 Hosting operator id = 7 Hosting Expression = Subquery scalar-subquery +- CometScan parquet spark_catalog.default.store_sales (43) -(43) Scan parquet spark_catalog.default.store_sales +(43) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_addr_sk#25, ss_store_sk#26, ss_net_profit#27, ss_sold_date_sk#28] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q45/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q45/explain.txt index f128499e3..962a51203 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q45/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q45/explain.txt @@ -37,7 +37,7 @@ TakeOrderedAndProject (36) +- CometScan parquet spark_catalog.default.item (25) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#2, ws_bill_customer_sk#3, ws_sales_price#4, ws_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -49,7 +49,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: 
[i_item_sk#1], [ss_item_sk#4], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q48/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q48/explain.txt index 40181264b..70c7dc75f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q48/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q48/explain.txt @@ -29,7 +29,7 @@ +- CometScan parquet spark_catalog.default.date_dim (19) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [7]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -41,7 +41,7 @@ ReadSchema: struct= 100.00) AND (ss_sales_price#5 <= 150.00)) OR ((ss_sales_price#5 >= 50.00) AND (ss_sales_price#5 <= 100.00))) OR ((ss_sales_price#5 >= 150.00) AND (ss_sales_price#5 <= 200.00)))) AND ((((ss_net_profit#6 >= 0.00) AND (ss_net_profit#6 <= 2000.00)) OR ((ss_net_profit#6 >= 150.00) AND (ss_net_profit#6 <= 3000.00))) OR ((ss_net_profit#6 >= 50.00) AND (ss_net_profit#6 <= 25000.00)))) -(3) Scan parquet spark_catalog.default.store +(3) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -65,7 +65,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#9], Inner, BuildRight Input [8]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, s_store_sk#9] Arguments: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7], [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.customer_demographics +(8) CometScan parquet spark_catalog.default.customer_demographics Output [3]: [cd_demo_sk#10, cd_marital_status#11, cd_education_status#12] Batched: true 
Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -89,7 +89,7 @@ Arguments: [ss_cdemo_sk#1], [cd_demo_sk#10], Inner, ((((((cd_marital_status#11 = Input [9]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, cd_demo_sk#10, cd_marital_status#11, cd_education_status#12] Arguments: [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7], [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.customer_address +(13) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -117,7 +117,7 @@ Arguments: [ss_addr_sk#2], [ca_address_sk#13], Inner, ((((ca_state#14 IN (CO,OH, Input [6]: [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7, ca_address_sk#13, ca_state#14] Arguments: [ss_quantity#4, ss_sold_date_sk#7], [ss_quantity#4, ss_sold_date_sk#7] -(19) Scan parquet spark_catalog.default.date_dim +(19) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -172,7 +172,7 @@ BroadcastExchange (33) +- CometScan parquet spark_catalog.default.date_dim (29) -(29) Scan parquet spark_catalog.default.date_dim +(29) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q49/explain.txt index f7e750d47..2def4544c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q49/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q49/explain.txt @@ -78,7 +78,7 @@ TakeOrderedAndProject (77) +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] -(5) Scan parquet spark_catalog.default.web_returns +(5) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -122,7 +122,7 @@ Arguments: [ws_order_number#2, ws_item_sk#1], [wr_order_number#9, wr_item_sk#8], Input [9]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11] Arguments: [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11], [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [3]: 
[d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -195,7 +195,7 @@ Condition : ((return_rank#25 <= 10) OR (currency_rank#26 <= 10)) Output [5]: [web AS channel#27, item#22, return_ratio#23, return_rank#25, currency_rank#26] Input [5]: [item#22, return_ratio#23, currency_ratio#24, return_rank#25, currency_rank#26] -(27) Scan parquet spark_catalog.default.catalog_sales +(27) CometScan parquet spark_catalog.default.catalog_sales Output [6]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_net_profit#32, cs_sold_date_sk#33] Batched: true Location: InMemoryFileIndex [] @@ -215,7 +215,7 @@ Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, c Input [5]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] -(31) Scan parquet spark_catalog.default.catalog_returns +(31) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#35, cr_order_number#36, cr_return_quantity#37, cr_return_amount#38, cr_returned_date_sk#39] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -296,7 +296,7 @@ Condition : ((return_rank#50 <= 10) OR (currency_rank#51 <= 10)) Output [5]: [catalog AS channel#52, item#47, return_ratio#48, return_rank#50, currency_rank#51] Input [5]: [item#47, return_ratio#48, currency_ratio#49, return_rank#50, currency_rank#51] -(50) Scan parquet spark_catalog.default.store_sales +(50) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_net_profit#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -316,7 +316,7 @@ Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, Input [5]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] -(54) Scan parquet spark_catalog.default.store_returns +(54) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#60, sr_ticket_number#61, sr_return_quantity#62, sr_return_amt#63, sr_returned_date_sk#64] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -431,7 +431,7 @@ BroadcastExchange (82) +- CometScan parquet spark_catalog.default.date_dim (78) -(78) Scan parquet spark_catalog.default.date_dim +(78) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q5/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q5/explain.txt index 82b5c6690..02ad2f357 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q5/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q5/explain.txt @@ -71,7 +71,7 @@ TakeOrderedAndProject (70) +- CometScan parquet spark_catalog.default.web_site (56) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -87,7 +87,7 @@ 
Condition : isnotnull(ss_store_sk#1) Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -107,7 +107,7 @@ Arguments: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, n Child 0 Input [6]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11] Child 1 Input [6]: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, net_loss#21] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -135,7 +135,7 @@ Arguments: [date_sk#7], [d_date_sk#22], Inner, BuildRight Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22] Arguments: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11], [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11] -(14) Scan parquet spark_catalog.default.store +(14) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#24, s_store_id#25] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -178,7 +178,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt# Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#8))#30, sum(UnscaledValue(return_amt#10))#31, sum(UnscaledValue(profit#9))#32, sum(UnscaledValue(net_loss#11))#33] Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#8))#30,17,2) AS sales#34, MakeDecimal(sum(UnscaledValue(return_amt#10))#31,17,2) AS returns#35, (MakeDecimal(sum(UnscaledValue(profit#9))#32,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#33,17,2)) AS profit#36, store channel AS channel#37, concat(store, s_store_id#25) AS id#38] -(23) Scan parquet spark_catalog.default.catalog_sales +(23) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Batched: true Location: InMemoryFileIndex [] @@ -194,7 +194,7 @@ Condition : isnotnull(cs_catalog_page_sk#39) Input [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Arguments: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49], [cs_catalog_page_sk#39 AS page_sk#44, cs_sold_date_sk#42 AS date_sk#45, cs_ext_sales_price#40 AS sales_price#46, cs_net_profit#41 AS profit#47, 0.00 AS return_amt#48, 0.00 AS net_loss#49] -(26) Scan parquet spark_catalog.default.catalog_returns +(26) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_catalog_page_sk#50, cr_return_amount#51, cr_net_loss#52, cr_returned_date_sk#53] Batched: true Location: InMemoryFileIndex [] @@ -226,7 +226,7 @@ Arguments: [date_sk#45], [d_date_sk#60], Inner, BuildRight Input [7]: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49, d_date_sk#60] Arguments: [page_sk#44, sales_price#46, profit#47, return_amt#48, net_loss#49], [page_sk#44, sales_price#46, 
profit#47, return_amt#48, net_loss#49] -(33) Scan parquet spark_catalog.default.catalog_page +(33) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#61, cp_catalog_page_id#62] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -269,7 +269,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#46)), sum(UnscaledValue(return_amt Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#46))#67, sum(UnscaledValue(return_amt#48))#68, sum(UnscaledValue(profit#47))#69, sum(UnscaledValue(net_loss#49))#70] Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#46))#67,17,2) AS sales#71, MakeDecimal(sum(UnscaledValue(return_amt#48))#68,17,2) AS returns#72, (MakeDecimal(sum(UnscaledValue(profit#47))#69,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#49))#70,17,2)) AS profit#73, catalog channel AS channel#74, concat(catalog_page, cp_catalog_page_id#62) AS id#75] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Batched: true Location: InMemoryFileIndex [] @@ -285,7 +285,7 @@ Condition : isnotnull(ws_web_site_sk#76) Input [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Arguments: [wsr_web_site_sk#81, date_sk#82, sales_price#83, profit#84, return_amt#85, net_loss#86], [ws_web_site_sk#76 AS wsr_web_site_sk#81, ws_sold_date_sk#79 AS date_sk#82, ws_ext_sales_price#77 AS sales_price#83, ws_net_profit#78 AS profit#84, 0.00 AS return_amt#85, 0.00 AS net_loss#86] -(45) Scan parquet spark_catalog.default.web_returns +(45) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#87, wr_order_number#88, wr_return_amt#89, wr_net_loss#90, wr_returned_date_sk#91] Batched: true Location: InMemoryFileIndex [] @@ -296,7 +296,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -122,7 +122,7 @@ Arguments: hashpartitioning(item_sk#9, d_date#6, 5), ENSURE_REQUIREMENTS, [plan_ Input [3]: [item_sk#9, d_date#6, cume_sales#11] Arguments: [item_sk#9 ASC NULLS FIRST, d_date#6 ASC NULLS FIRST], false, 0 -(19) Scan parquet spark_catalog.default.store_sales +(19) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#12, ss_sales_price#13, ss_sold_date_sk#14] Batched: true Location: InMemoryFileIndex [] @@ -227,7 +227,7 @@ BroadcastExchange (45) +- CometScan parquet spark_catalog.default.date_dim (41) -(41) Scan parquet spark_catalog.default.date_dim +(41) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q52/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q52/explain.txt index eee36d229..42b974e53 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q52/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q52/explain.txt @@ -20,7 +20,7 @@ +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet 
spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1, d_year#2], [d_date_sk#1, d_year#2] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_year#2, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [d_year#2, ss_item_sk#4, ss_ext_sales_price#5], [d_year#2, ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q53/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q53/explain.txt index 8156e275b..9b71fa400 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q53/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q53/explain.txt @@ -30,7 +30,7 @@ TakeOrderedAndProject (29) +- CometScan parquet spark_catalog.default.store (15) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4, i_manufact_id#5] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -45,7 +45,7 @@ Condition : ((((i_category#4 IN (Books Input [5]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_manufact_id#5], [i_item_sk#1, i_manufact_id#5] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#10, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -70,7 +70,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#10], Inner, BuildRight Input [6]: [i_item_sk#1, i_manufact_id#5, ss_item_sk#10, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] Arguments: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13], [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] -(9) Scan parquet spark_catalog.default.date_dim +(9) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_month_seq#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -98,7 +98,7 @@ Arguments: [ss_sold_date_sk#13], [d_date_sk#15], Inner, BuildRight Input [6]: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13, d_date_sk#15, d_qoy#17] Arguments: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, d_qoy#17], [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, d_qoy#17] -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#18] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -173,7 +173,7 @@ BroadcastExchange (34) +- CometScan parquet 
spark_catalog.default.date_dim (30) -(30) Scan parquet spark_catalog.default.date_dim +(30) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_month_seq#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q54/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q54/explain.txt index d5bd8e387..73422b292 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q54/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q54/explain.txt @@ -57,7 +57,7 @@ +- CometScan parquet spark_catalog.default.date_dim (43) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -73,7 +73,7 @@ Condition : (isnotnull(cs_item_sk#2) AND isnotnull(cs_bill_customer_sk#1)) Input [3]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_sold_date_sk#3] Arguments: [sold_date_sk#5, customer_sk#6, item_sk#7], [cs_sold_date_sk#3 AS sold_date_sk#5, cs_bill_customer_sk#1 AS customer_sk#6, cs_item_sk#2 AS item_sk#7] -(4) Scan parquet spark_catalog.default.web_sales +(4) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#8, ws_bill_customer_sk#9, ws_sold_date_sk#10] Batched: true Location: InMemoryFileIndex [] @@ -93,7 +93,7 @@ Arguments: [sold_date_sk#11, customer_sk#12, item_sk#13], [ws_sold_date_sk#10 AS Child 0 Input [3]: [sold_date_sk#5, customer_sk#6, item_sk#7] Child 1 Input [3]: [sold_date_sk#11, customer_sk#12, item_sk#13] -(8) Scan parquet spark_catalog.default.item +(8) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#14, i_class#15, i_category#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -121,7 +121,7 @@ Arguments: [item_sk#7], [i_item_sk#14], Inner, BuildRight Input [4]: [sold_date_sk#5, customer_sk#6, item_sk#7, i_item_sk#14] Arguments: [sold_date_sk#5, customer_sk#6], [sold_date_sk#5, customer_sk#6] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#17, d_year#18, d_moy#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -149,7 +149,7 @@ Arguments: [sold_date_sk#5], [d_date_sk#17], Inner, BuildRight Input [3]: [sold_date_sk#5, customer_sk#6, d_date_sk#17] Arguments: [customer_sk#6], [customer_sk#6] -(20) Scan parquet spark_catalog.default.customer +(20) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#20, c_current_addr_sk#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -187,7 +187,7 @@ Input [2]: [c_customer_sk#20, c_current_addr_sk#21] Keys [2]: [c_customer_sk#20, c_current_addr_sk#21] Functions: [] -(28) Scan parquet spark_catalog.default.store_sales +(28) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_customer_sk#22, ss_ext_sales_price#23, ss_sold_date_sk#24] Batched: true Location: InMemoryFileIndex [] @@ -212,7 +212,7 @@ Arguments: [c_customer_sk#20], [ss_customer_sk#22], Inner, BuildRight Input [5]: [c_customer_sk#20, c_current_addr_sk#21, ss_customer_sk#22, ss_ext_sales_price#23, ss_sold_date_sk#24] Arguments: [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24], [c_customer_sk#20, 
c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24] -(33) Scan parquet spark_catalog.default.customer_address +(33) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#26, ca_county#27, ca_state#28] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -236,7 +236,7 @@ Arguments: [c_current_addr_sk#21], [ca_address_sk#26], Inner, BuildRight Input [7]: [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_address_sk#26, ca_county#27, ca_state#28] Arguments: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28], [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28] -(38) Scan parquet spark_catalog.default.store +(38) CometScan parquet spark_catalog.default.store Output [2]: [s_county#29, s_state#30] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -260,7 +260,7 @@ Arguments: [ca_county#27, ca_state#28], [s_county#29, s_state#30], Inner, BuildR Input [7]: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28, s_county#29, s_state#30] Arguments: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24], [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24] -(43) Scan parquet spark_catalog.default.date_dim +(43) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#31, d_month_seq#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -333,7 +333,7 @@ BroadcastExchange (61) +- CometScan parquet spark_catalog.default.date_dim (57) -(57) Scan parquet spark_catalog.default.date_dim +(57) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#17, d_year#18, d_moy#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -365,7 +365,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#31, d_month_seq#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -397,7 +397,7 @@ Subquery:4 Hosting operator id = 63 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (67) -(67) Scan parquet spark_catalog.default.date_dim +(67) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#42, d_year#43, d_moy#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -439,7 +439,7 @@ Subquery:5 Hosting operator id = 63 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (74) -(74) Scan parquet spark_catalog.default.date_dim +(74) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#46, d_year#47, d_moy#48] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q55/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q55/explain.txt index 6eff9f52d..46240a3c0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q55/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q55/explain.txt @@ -20,7 +20,7 @@ +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan 
parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1], [d_date_sk#1] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [4]: [d_date_sk#1, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [ss_item_sk#4, ss_ext_sales_price#5], [ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q56/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q56/explain.txt index d9af63246..c56eb8287 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q56/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q56/explain.txt @@ -62,7 +62,7 @@ +- ReusedExchange (50) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -74,7 +74,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_item_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_item_id#13, i_color#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -192,7 +192,7 @@ Input [2]: [i_item_id#12, sum#15] Keys [1]: [i_item_id#12] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] -(28) Scan parquet spark_catalog.default.catalog_sales +(28) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#16, cs_item_sk#17, cs_ext_sales_price#18, cs_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -254,7 +254,7 @@ Input [2]: [i_item_id#24, sum#25] Keys [1]: [i_item_id#24] Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#18))] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#26, ws_bill_addr_sk#27, ws_ext_sales_price#28, ws_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -352,7 +352,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q57/explain.txt index 629959102..78df07e8f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q57/explain.txt +++ 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q57/explain.txt @@ -48,7 +48,7 @@ TakeOrderedAndProject (47) +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#1, i_brand#2, i_category#3] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: [i_item_sk#1], [cs_item_sk#5], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [cs_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.call_center +(13) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#12, cc_name#13] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q58/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q58/explain.txt index f05dbd720..cd1e112f6 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q58/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q58/explain.txt @@ -52,7 +52,7 @@ +- ReusedExchange (40) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -64,7 +64,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_item_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -88,7 +88,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [5]: [ss_item_sk#1, ss_ext_sales_price#2, 
ss_sold_date_sk#3, i_item_sk#5, i_item_id#6] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#2))] Input [2]: [item_id#11, ss_item_rev#12] Condition : isnotnull(ss_item_rev#12) -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#13, cs_ext_sales_price#14, cs_sold_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -209,7 +209,7 @@ Arguments: [item_id#11], [item_id#21], Inner, ((((cast(ss_item_rev#12 as decimal Input [4]: [item_id#11, ss_item_rev#12, item_id#21, cs_item_rev#22] Arguments: [item_id#11, ss_item_rev#12, cs_item_rev#22], [item_id#11, ss_item_rev#12, cs_item_rev#22] -(35) Scan parquet spark_catalog.default.web_sales +(35) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#23, ws_ext_sales_price#24, ws_sold_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -298,7 +298,7 @@ BroadcastExchange (61) +- CometScan parquet spark_catalog.default.date_dim (54) -(52) Scan parquet spark_catalog.default.date_dim +(52) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -309,7 +309,7 @@ ReadSchema: struct Input [2]: [d_date_sk#7, d_date#8] Condition : isnotnull(d_date_sk#7) -(54) Scan parquet spark_catalog.default.date_dim +(54) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#9, d_week_seq#37] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -351,7 +351,7 @@ Subquery:2 Hosting operator id = 55 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#40, d_week_seq#41] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q59/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q59/explain.txt index a3f89c9dc..62311fb33 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q59/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q59/explain.txt @@ -40,7 +40,7 @@ +- CometScan parquet spark_catalog.default.date_dim (29) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -52,7 +52,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_week_seq#5, d_day_name#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -90,7 +90,7 @@ Input [9]: [d_week_seq#5, ss_store_sk#1, sum#7, sum#8, sum#9, sum#10, sum#11, su Keys [2]: [d_week_seq#5, ss_store_sk#1] Functions [7]: 
[sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))] -(11) Scan parquet spark_catalog.default.store +(11) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#14, s_store_id#15, s_store_name#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -114,7 +114,7 @@ Arguments: [ss_store_sk#1], [s_store_sk#14], Inner, BuildRight Input [12]: [d_week_seq#5, ss_store_sk#1, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_sk#14, s_store_id#15, s_store_name#16] Arguments: [d_week_seq#5, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_id#15, s_store_name#16], [d_week_seq#5, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_id#15, s_store_name#16] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_month_seq#24, d_week_seq#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -150,7 +150,7 @@ Input [9]: [d_week_seq#36, ss_store_sk#37, sum#38, sum#39, sum#40, sum#41, sum#4 Keys [2]: [d_week_seq#36, ss_store_sk#37] Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#45 = Sunday ) THEN ss_sales_price#46 END)), sum(UnscaledValue(CASE WHEN (d_day_name#45 = Monday ) THEN ss_sales_price#46 END)), sum(UnscaledValue(CASE WHEN (d_day_name#45 = Tuesday ) THEN ss_sales_price#46 END)), sum(UnscaledValue(CASE WHEN (d_day_name#45 = Wednesday) THEN ss_sales_price#46 END)), sum(UnscaledValue(CASE WHEN (d_day_name#45 = Thursday ) THEN ss_sales_price#46 END)), sum(UnscaledValue(CASE WHEN (d_day_name#45 = Friday ) THEN ss_sales_price#46 END)), sum(UnscaledValue(CASE WHEN (d_day_name#45 = Saturday ) THEN ss_sales_price#46 END))] -(24) Scan parquet spark_catalog.default.store +(24) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#47, s_store_id#48] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -174,7 +174,7 @@ Arguments: [ss_store_sk#37], [s_store_sk#47], Inner, BuildRight Input [11]: [d_week_seq#36, ss_store_sk#37, sun_sales#49, mon_sales#50, tue_sales#51, wed_sales#52, thu_sales#53, fri_sales#54, sat_sales#55, s_store_sk#47, s_store_id#48] Arguments: [d_week_seq#36, sun_sales#49, mon_sales#50, tue_sales#51, wed_sales#52, thu_sales#53, fri_sales#54, sat_sales#55, s_store_id#48], [d_week_seq#36, sun_sales#49, mon_sales#50, tue_sales#51, wed_sales#52, thu_sales#53, fri_sales#54, sat_sales#55, s_store_id#48] -(29) Scan parquet spark_catalog.default.date_dim +(29) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_month_seq#56, d_week_seq#57] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q6/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q6/explain.txt index 
7a8044667..e87f6ce76 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q6/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q6/explain.txt @@ -39,7 +39,7 @@ +- CometScan parquet spark_catalog.default.item (21) -(1) Scan parquet spark_catalog.default.customer_address +(1) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#1, ca_state#2] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -50,7 +50,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#1, ca_state#2] Condition : isnotnull(ca_address_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#3, c_current_addr_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -74,7 +74,7 @@ Arguments: [ca_address_sk#1], [c_current_addr_sk#4], Inner, BuildRight Input [4]: [ca_address_sk#1, ca_state#2, c_customer_sk#3, c_current_addr_sk#4] Arguments: [ca_state#2, c_customer_sk#3], [ca_state#2, c_customer_sk#3] -(8) Scan parquet spark_catalog.default.store_sales +(8) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -99,7 +99,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], Inner, BuildRight Input [5]: [ca_state#2, c_customer_sk#3, ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Arguments: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7], [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -127,7 +127,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [4]: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7, d_date_sk#9] Arguments: [ca_state#2, ss_item_sk#5], [ca_state#2, ss_item_sk#5] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#13, i_current_price#14, i_category#15] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -138,7 +138,7 @@ ReadSchema: struct Input [3]: [i_item_sk#13, i_current_price#14, i_category#15] Condition : ((isnotnull(i_current_price#14) AND isnotnull(i_category#15)) AND isnotnull(i_item_sk#13)) -(21) Scan parquet spark_catalog.default.item +(21) CometScan parquet spark_catalog.default.item Output [2]: [i_current_price#16, i_category#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -228,7 +228,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(39) Scan parquet spark_catalog.default.date_dim +(39) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -260,7 +260,7 @@ Subquery:2 Hosting operator id = 40 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (44) -(44) Scan parquet spark_catalog.default.date_dim +(44) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#24, d_year#25, d_moy#26] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q60/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q60/explain.txt index 3b5101925..0d64004d9 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q60/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q60/explain.txt @@ -62,7 +62,7 @@ +- ReusedExchange (50) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -74,7 +74,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_item_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_item_id#13, i_category#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -192,7 +192,7 @@ Input [2]: [i_item_id#12, sum#15] Keys [1]: [i_item_id#12] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] -(28) Scan parquet spark_catalog.default.catalog_sales +(28) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#16, cs_item_sk#17, cs_ext_sales_price#18, cs_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -254,7 +254,7 @@ Input [2]: [i_item_id#24, sum#25] Keys [1]: [i_item_id#24] Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#18))] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#26, ws_bill_addr_sk#27, ws_ext_sales_price#28, ws_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -352,7 +352,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q61/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q61/explain.txt index 8fcd94392..4a157e64e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q61/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q61/explain.txt @@ -66,7 +66,7 @@ +- ReusedExchange (56) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_promo_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -78,7 +78,7 @@ ReadSchema: struct Input [2]: [s_store_sk#1, s_store_name#2] Condition : isnotnull(s_store_sk#1) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -63,7 +63,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Condition : (isnotnull(ss_store_sk#4) AND isnotnull(ss_item_sk#3)) -(5) Scan parquet spark_catalog.default.date_dim +(5) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#8, d_month_seq#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -122,7 +122,7 @@ Arguments: 
[s_store_sk#1], [ss_store_sk#4], Inner, BuildRight Input [5]: [s_store_sk#1, s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11] Arguments: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11], [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11] -(18) Scan parquet spark_catalog.default.item +(18) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#12, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -146,7 +146,7 @@ Arguments: [ss_item_sk#3], [i_item_sk#12], Inner, BuildRight Input [9]: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11, i_item_sk#12, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16] Arguments: [s_store_name#2, ss_store_sk#4, revenue#11, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16], [s_store_name#2, ss_store_sk#4, revenue#11, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16] -(23) Scan parquet spark_catalog.default.store_sales +(23) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#17, ss_store_sk#18, ss_sales_price#19, ss_sold_date_sk#20] Batched: true Location: InMemoryFileIndex [] @@ -232,7 +232,7 @@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(40) Scan parquet spark_catalog.default.date_dim +(40) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#8, d_month_seq#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q66/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q66/explain.txt index c8cdc8094..693eb5662 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q66/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q66/explain.txt @@ -51,7 +51,7 @@ +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_warehouse_sk#3, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, ws_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -63,7 +63,7 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#10] Join type: LeftAnti Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#14, cs_sold_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -171,7 +171,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#2, c_current_addr_sk#3] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(28) Scan parquet spark_catalog.default.customer_address +(28) CometScan 
parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#18, ca_state#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [1]: [c_current_cdemo_sk#2] Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#18] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_education_status#23, cd_purchase_estimate#24, cd_credit_rating#25] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q7/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q7/explain.txt index 1f5e1338e..d18a60b0b 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q7/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q7/explain.txt @@ -31,7 +31,7 @@ +- CometScan parquet spark_catalog.default.promotion (20) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -43,7 +43,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -89,7 +89,7 @@ Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] (9) ColumnarToRow [codegen id : 4] Input [2]: [ss_store_sk#1, ss_net_profit#2] -(10) Scan parquet spark_catalog.default.store +(10) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#7, s_county#8, s_state#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -103,7 +103,7 @@ Condition : isnotnull(s_store_sk#7) (12) ColumnarToRow [codegen id : 3] Input [3]: [s_store_sk#7, s_county#8, s_state#9] -(13) Scan parquet spark_catalog.default.store_sales +(13) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -115,7 +115,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Condition : isnotnull(ss_store_sk#10) -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#14, s_state#15] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -264,7 +264,7 @@ BroadcastExchange (50) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location 
[not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q71/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q71/explain.txt index 65ffab7e8..79a71af03 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q71/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q71/explain.txt @@ -38,7 +38,7 @@ +- CometScan parquet spark_catalog.default.time_dim (26) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#1, i_brand_id#2, i_brand#3, i_manager_id#4] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -57,7 +57,7 @@ Arguments: [i_item_sk#1, i_brand_id#2, i_brand#3], [i_item_sk#1, i_brand_id#2, i Input [3]: [i_item_sk#1, i_brand_id#2, i_brand#3] Arguments: [i_item_sk#1, i_brand_id#2, i_brand#3] -(5) Scan parquet spark_catalog.default.web_sales +(5) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_sold_time_sk#5, ws_item_sk#6, ws_ext_sales_price#7, ws_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -69,7 +69,7 @@ ReadSchema: struct date_add(d_d Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#28, d_date#29] Arguments: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] -(46) Scan parquet spark_catalog.default.promotion +(46) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#30] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -314,7 +314,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Arguments: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST] -(53) Scan parquet spark_catalog.default.catalog_returns +(53) CometScan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#31, cr_order_number#32, cr_returned_date_sk#33] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -377,7 +377,7 @@ BroadcastExchange (69) +- CometScan parquet spark_catalog.default.date_dim (65) -(65) Scan parquet spark_catalog.default.date_dim +(65) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q73/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q73/explain.txt index 7530f5aa5..e4d1ff5a2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q73/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q73/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, 
ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -45,7 +45,7 @@ ReadSchema: struct= 1) AND (cnt#17 <= 5)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q74/explain.txt index b23b0b48c..c7a559c8e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q74/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q74/explain.txt @@ -69,7 +69,7 @@ +- ReusedExchange (58) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -80,7 +80,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#14, c_customer_id#15, c_first_name#16, c_last_name#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -158,7 +158,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -83,7 +83,7 @@ Arguments: [ss_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3, d_date_sk#5] Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] -(9) Scan parquet spark_catalog.default.store +(9) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#8, s_store_name#9, s_zip#10] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -107,7 +107,7 @@ Arguments: [ss_store_sk#1], [s_store_sk#8], Inner, BuildRight Input [5]: [ss_store_sk#1, ss_net_profit#2, s_store_sk#8, s_store_name#9, s_zip#10] Arguments: [ss_net_profit#2, s_store_name#9, s_zip#10], [ss_net_profit#2, s_store_name#9, s_zip#10] -(14) Scan parquet spark_catalog.default.customer_address +(14) CometScan parquet spark_catalog.default.customer_address Output [1]: [ca_zip#11] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -121,7 +121,7 @@ Condition : (substr(ca_zip#11, 1, 5) INSET 10144, 10336, 10390, 10445, 10516, 10 Input [1]: [ca_zip#11] Arguments: [ca_zip#12], [substr(ca_zip#11, 1, 5) AS ca_zip#12] -(17) Scan parquet spark_catalog.default.customer_address +(17) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#13, ca_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -132,7 +132,7 
@@ ReadSchema: struct Input [2]: [ca_address_sk#13, ca_zip#14] Condition : isnotnull(ca_address_sk#13) -(19) Scan parquet spark_catalog.default.customer +(19) CometScan parquet spark_catalog.default.customer Output [2]: [c_current_addr_sk#15, c_preferred_cust_flag#16] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -249,7 +249,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(43) Scan parquet spark_catalog.default.date_dim +(43) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q80/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q80/explain.txt index 962e04f07..65fe1a28b 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q80/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q80/explain.txt @@ -103,7 +103,7 @@ TakeOrderedAndProject (102) +- ReusedExchange (90) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -123,7 +123,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#4, 5), ENSURE_REQUIRE Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7], [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#4 ASC NULLS FIRST] -(5) Scan parquet spark_catalog.default.store_returns +(5) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12, sr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -155,7 +155,7 @@ Arguments: [ss_item_sk#1, ss_ticket_number#4], [sr_item_sk#9, sr_ticket_number#1 Input [11]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12], [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12] -(12) Scan parquet spark_catalog.default.date_dim +(12) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -183,7 +183,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#14], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12, d_date_sk#14] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12], [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12] -(18) Scan parquet spark_catalog.default.store 
+(18) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#16, s_store_id#17] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -207,7 +207,7 @@ Arguments: [ss_store_sk#2], [s_store_sk#16], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_sk#16, s_store_id#17] Arguments: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17], [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17] -(23) Scan parquet spark_catalog.default.item +(23) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#18, i_current_price#19] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -235,7 +235,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#18], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17, i_item_sk#18] Arguments: [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17], [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17] -(29) Scan parquet spark_catalog.default.promotion +(29) CometScan parquet spark_catalog.default.promotion Output [2]: [p_promo_sk#20, p_channel_tv#21] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -282,7 +282,7 @@ Functions [3]: [sum(UnscaledValue(ss_ext_sales_price#5)), sum(coalesce(cast(sr_r Aggregate Attributes [3]: [sum(UnscaledValue(ss_ext_sales_price#5))#27, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#28, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#29] Results [5]: [MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#5))#27,17,2) AS sales#30, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#28 AS returns#31, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#29 AS profit#32, store channel AS channel#33, concat(store, s_store_id#17) AS id#34] -(39) Scan parquet spark_catalog.default.catalog_sales +(39) CometScan parquet spark_catalog.default.catalog_sales Output [7]: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_order_number#38, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41] Batched: true Location: InMemoryFileIndex [] @@ -302,7 +302,7 @@ Arguments: hashpartitioning(cs_item_sk#36, cs_order_number#38, 5), ENSURE_REQUIR Input [7]: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_order_number#38, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41] Arguments: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_order_number#38, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41], [cs_item_sk#36 ASC NULLS FIRST, cs_order_number#38 ASC NULLS FIRST] -(43) Scan parquet spark_catalog.default.catalog_returns +(43) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#43, cr_order_number#44, cr_return_amount#45, cr_net_loss#46, cr_returned_date_sk#47] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -346,7 +346,7 @@ Arguments: [cs_sold_date_sk#41], [d_date_sk#48], Inner, BuildRight Input [9]: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41, cr_return_amount#45, cr_net_loss#46, 
d_date_sk#48] Arguments: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_ext_sales_price#39, cs_net_profit#40, cr_return_amount#45, cr_net_loss#46], [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_ext_sales_price#39, cs_net_profit#40, cr_return_amount#45, cr_net_loss#46] -(53) Scan parquet spark_catalog.default.catalog_page +(53) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#49, cp_catalog_page_id#50] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -413,7 +413,7 @@ Functions [3]: [sum(UnscaledValue(cs_ext_sales_price#39)), sum(coalesce(cast(cr_ Aggregate Attributes [3]: [sum(UnscaledValue(cs_ext_sales_price#39))#58, sum(coalesce(cast(cr_return_amount#45 as decimal(12,2)), 0.00))#59, sum((cs_net_profit#40 - coalesce(cast(cr_net_loss#46 as decimal(12,2)), 0.00)))#60] Results [5]: [MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#39))#58,17,2) AS sales#61, sum(coalesce(cast(cr_return_amount#45 as decimal(12,2)), 0.00))#59 AS returns#62, sum((cs_net_profit#40 - coalesce(cast(cr_net_loss#46 as decimal(12,2)), 0.00)))#60 AS profit#63, catalog channel AS channel#64, concat(catalog_page, cp_catalog_page_id#50) AS id#65] -(68) Scan parquet spark_catalog.default.web_sales +(68) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_order_number#69, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72] Batched: true Location: InMemoryFileIndex [] @@ -433,7 +433,7 @@ Arguments: hashpartitioning(ws_item_sk#66, ws_order_number#69, 5), ENSURE_REQUIR Input [7]: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_order_number#69, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72] Arguments: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_order_number#69, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72], [ws_item_sk#66 ASC NULLS FIRST, ws_order_number#69 ASC NULLS FIRST] -(72) Scan parquet spark_catalog.default.web_returns +(72) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#74, wr_order_number#75, wr_return_amt#76, wr_net_loss#77, wr_returned_date_sk#78] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -477,7 +477,7 @@ Arguments: [ws_sold_date_sk#72], [d_date_sk#79], Inner, BuildRight Input [9]: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72, wr_return_amt#76, wr_net_loss#77, d_date_sk#79] Arguments: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_ext_sales_price#70, ws_net_profit#71, wr_return_amt#76, wr_net_loss#77], [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_ext_sales_price#70, ws_net_profit#71, wr_return_amt#76, wr_net_loss#77] -(82) Scan parquet spark_catalog.default.web_site +(82) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#80, web_site_id#81] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] @@ -582,7 +582,7 @@ BroadcastExchange (107) +- CometScan parquet spark_catalog.default.date_dim (103) -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q81/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q81/explain.txt index 
e72c5b017..10e126714 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q81/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q81/explain.txt @@ -48,7 +48,7 @@ +- CometScan parquet spark_catalog.default.customer_address (41) -(1) Scan parquet spark_catalog.default.catalog_returns +(1) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_returning_customer_sk#1, cr_returning_addr_sk#2, cr_return_amt_inc_tax#3, cr_returned_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ ReadSchema: struct= 62.00)) A Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(4) Scan parquet spark_catalog.default.inventory +(4) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -71,7 +71,7 @@ Arguments: [i_item_sk#1], [inv_item_sk#6], Inner, BuildRight Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item_sk#6, inv_date_sk#8] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(17) Scan parquet spark_catalog.default.store_sales +(17) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#12, ss_sold_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -158,7 +158,7 @@ BroadcastExchange (31) +- CometScan parquet spark_catalog.default.date_dim (27) -(27) Scan parquet spark_catalog.default.date_dim +(27) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q83/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q83/explain.txt index ffd8a5eb1..a84354c40 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q83/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q83/explain.txt @@ -49,7 +49,7 @@ +- ReusedExchange (38) -(1) Scan parquet spark_catalog.default.store_returns +(1) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -61,7 +61,7 @@ ReadSchema: struct Input [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] Condition : isnotnull(sr_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_item_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -85,7 +85,7 @@ Arguments: [sr_item_sk#1], [i_item_sk#5], Inner, 
BuildRight Input [5]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3, i_item_sk#5, i_item_id#6] Arguments: [sr_return_quantity#2, sr_returned_date_sk#3, i_item_id#6], [sr_return_quantity#2, sr_returned_date_sk#3, i_item_id#6] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -135,7 +135,7 @@ Input [2]: [i_item_id#6, sum#10] Keys [1]: [i_item_id#6] Functions [1]: [sum(sr_return_quantity#2)] -(19) Scan parquet spark_catalog.default.catalog_returns +(19) CometScan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#11, cr_return_quantity#12, cr_returned_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -198,7 +198,7 @@ Arguments: [item_id#21], [item_id#19], Inner, BuildRight Input [4]: [item_id#21, sr_item_qty#22, item_id#19, cr_item_qty#20] Arguments: [item_id#21, sr_item_qty#22, cr_item_qty#20], [item_id#21, sr_item_qty#22, cr_item_qty#20] -(33) Scan parquet spark_catalog.default.web_returns +(33) CometScan parquet spark_catalog.default.web_returns Output [3]: [wr_item_sk#23, wr_return_quantity#24, wr_returned_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -287,7 +287,7 @@ BroadcastExchange (62) +- CometScan parquet spark_catalog.default.date_dim (52) -(49) Scan parquet spark_catalog.default.date_dim +(49) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -298,13 +298,13 @@ ReadSchema: struct Input [2]: [d_date_sk#7, d_date#8] Condition : isnotnull(d_date_sk#7) -(51) Scan parquet spark_catalog.default.date_dim +(51) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#9, d_week_seq#37] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] ReadSchema: struct -(52) Scan parquet spark_catalog.default.date_dim +(52) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#38, d_week_seq#39] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q84/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q84/explain.txt index 468af40d2..572fd7a66 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q84/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q84/explain.txt @@ -33,7 +33,7 @@ TakeOrderedAndProject (32) +- CometScan parquet spark_catalog.default.store_returns (26) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [6]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#5, c_last_name#6] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -44,7 +44,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -64,7 +64,7 @@ Arguments: [ws_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5] Arguments: 
[ws_item_sk#1, ws_net_paid#2], [ws_item_sk#1, ws_net_paid#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#7, i_class#8, i_category#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -139,7 +139,7 @@ BroadcastExchange (28) +- CometScan parquet spark_catalog.default.date_dim (24) -(24) Scan parquet spark_catalog.default.date_dim +(24) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q87/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q87/explain.txt index d023b5b31..28b942e2f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q87/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q87/explain.txt @@ -50,7 +50,7 @@ +- ReusedExchange (37) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Batched: true Location: InMemoryFileIndex [] @@ -62,7 +62,7 @@ ReadSchema: struct Input [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Condition : isnotnull(ss_customer_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -90,7 +90,7 @@ Arguments: [ss_sold_date_sk#2], [d_date_sk#4], Inner, BuildRight Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5] Arguments: [ss_customer_sk#1, d_date#5], [ss_customer_sk#1, d_date#5] -(9) Scan parquet spark_catalog.default.customer +(9) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -131,7 +131,7 @@ Functions: [] (17) ColumnarToRow [codegen id : 3] Input [3]: [c_last_name#9, c_first_name#8, d_date#5] -(18) Scan parquet spark_catalog.default.catalog_sales +(18) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_bill_customer_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -194,7 +194,7 @@ Right keys [6]: [coalesce(c_last_name#17, ), isnull(c_last_name#17), coalesce(c_ Join type: LeftAnti Join condition: None -(32) Scan parquet spark_catalog.default.web_sales +(32) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#18, ws_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -289,7 +289,7 @@ BroadcastExchange (54) +- CometScan parquet spark_catalog.default.date_dim (50) -(50) Scan parquet spark_catalog.default.date_dim +(50) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q88/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q88/explain.txt index 1dface7ea..592e23cd2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q88/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q88/explain.txt @@ 
-173,7 +173,7 @@ +- ReusedExchange (164) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -188,7 +188,7 @@ Condition : ((isnotnull(ss_hdemo_sk#2) AND isnotnull(ss_sold_time_sk#1)) AND isn Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3] -(4) Scan parquet spark_catalog.default.household_demographics +(4) CometScan parquet spark_catalog.default.household_demographics Output [3]: [hd_demo_sk#5, hd_dep_count#6, hd_vehicle_count#7] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -216,7 +216,7 @@ Arguments: [ss_hdemo_sk#2], [hd_demo_sk#5], Inner, BuildRight Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5] Arguments: [ss_sold_time_sk#1, ss_store_sk#3], [ss_sold_time_sk#1, ss_store_sk#3] -(10) Scan parquet spark_catalog.default.time_dim +(10) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#8, t_hour#9, t_minute#10] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -244,7 +244,7 @@ Arguments: [ss_sold_time_sk#1], [t_time_sk#8], Inner, BuildRight Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#8] Arguments: [ss_store_sk#3], [ss_store_sk#3] -(16) Scan parquet spark_catalog.default.store +(16) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#11, s_store_name#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -288,7 +288,7 @@ Functions [1]: [count(1)] (25) ColumnarToRow [codegen id : 8] Input [1]: [h8_30_to_9#14] -(26) Scan parquet spark_catalog.default.store_sales +(26) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#15, ss_hdemo_sk#16, ss_store_sk#17, ss_sold_date_sk#18] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -315,7 +315,7 @@ Arguments: [ss_hdemo_sk#16], [hd_demo_sk#19], Inner, BuildRight Input [4]: [ss_sold_time_sk#15, ss_hdemo_sk#16, ss_store_sk#17, hd_demo_sk#19] Arguments: [ss_sold_time_sk#15, ss_store_sk#17], [ss_sold_time_sk#15, ss_store_sk#17] -(32) Scan parquet spark_catalog.default.time_dim +(32) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#20, t_hour#21, t_minute#22] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -379,7 +379,7 @@ Arguments: IdentityBroadcastMode, [plan_id=3] Join type: Inner Join condition: None -(47) Scan parquet spark_catalog.default.store_sales +(47) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#26, ss_hdemo_sk#27, ss_store_sk#28, ss_sold_date_sk#29] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -406,7 +406,7 @@ Arguments: [ss_hdemo_sk#27], [hd_demo_sk#30], Inner, BuildRight Input [4]: [ss_sold_time_sk#26, ss_hdemo_sk#27, ss_store_sk#28, hd_demo_sk#30] Arguments: [ss_sold_time_sk#26, ss_store_sk#28], [ss_sold_time_sk#26, ss_store_sk#28] -(53) Scan parquet spark_catalog.default.time_dim +(53) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#31, t_hour#32, t_minute#33] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -470,7 +470,7 @@ Arguments: 
IdentityBroadcastMode, [plan_id=5] Join type: Inner Join condition: None -(68) Scan parquet spark_catalog.default.store_sales +(68) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#37, ss_hdemo_sk#38, ss_store_sk#39, ss_sold_date_sk#40] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -497,7 +497,7 @@ Arguments: [ss_hdemo_sk#38], [hd_demo_sk#41], Inner, BuildRight Input [4]: [ss_sold_time_sk#37, ss_hdemo_sk#38, ss_store_sk#39, hd_demo_sk#41] Arguments: [ss_sold_time_sk#37, ss_store_sk#39], [ss_sold_time_sk#37, ss_store_sk#39] -(74) Scan parquet spark_catalog.default.time_dim +(74) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#42, t_hour#43, t_minute#44] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -561,7 +561,7 @@ Arguments: IdentityBroadcastMode, [plan_id=7] Join type: Inner Join condition: None -(89) Scan parquet spark_catalog.default.store_sales +(89) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#48, ss_hdemo_sk#49, ss_store_sk#50, ss_sold_date_sk#51] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -588,7 +588,7 @@ Arguments: [ss_hdemo_sk#49], [hd_demo_sk#52], Inner, BuildRight Input [4]: [ss_sold_time_sk#48, ss_hdemo_sk#49, ss_store_sk#50, hd_demo_sk#52] Arguments: [ss_sold_time_sk#48, ss_store_sk#50], [ss_sold_time_sk#48, ss_store_sk#50] -(95) Scan parquet spark_catalog.default.time_dim +(95) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#53, t_hour#54, t_minute#55] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -652,7 +652,7 @@ Arguments: IdentityBroadcastMode, [plan_id=9] Join type: Inner Join condition: None -(110) Scan parquet spark_catalog.default.store_sales +(110) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#59, ss_hdemo_sk#60, ss_store_sk#61, ss_sold_date_sk#62] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -679,7 +679,7 @@ Arguments: [ss_hdemo_sk#60], [hd_demo_sk#63], Inner, BuildRight Input [4]: [ss_sold_time_sk#59, ss_hdemo_sk#60, ss_store_sk#61, hd_demo_sk#63] Arguments: [ss_sold_time_sk#59, ss_store_sk#61], [ss_sold_time_sk#59, ss_store_sk#61] -(116) Scan parquet spark_catalog.default.time_dim +(116) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#64, t_hour#65, t_minute#66] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -743,7 +743,7 @@ Arguments: IdentityBroadcastMode, [plan_id=11] Join type: Inner Join condition: None -(131) Scan parquet spark_catalog.default.store_sales +(131) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#70, ss_hdemo_sk#71, ss_store_sk#72, ss_sold_date_sk#73] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -770,7 +770,7 @@ Arguments: [ss_hdemo_sk#71], [hd_demo_sk#74], Inner, BuildRight Input [4]: [ss_sold_time_sk#70, ss_hdemo_sk#71, ss_store_sk#72, hd_demo_sk#74] Arguments: [ss_sold_time_sk#70, ss_store_sk#72], [ss_sold_time_sk#70, ss_store_sk#72] -(137) Scan parquet spark_catalog.default.time_dim +(137) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#75, t_hour#76, t_minute#77] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -834,7 +834,7 @@ Arguments: IdentityBroadcastMode, [plan_id=13] Join type: Inner Join condition: None -(152) Scan 
parquet spark_catalog.default.store_sales +(152) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#81, ss_hdemo_sk#82, ss_store_sk#83, ss_sold_date_sk#84] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -861,7 +861,7 @@ Arguments: [ss_hdemo_sk#82], [hd_demo_sk#85], Inner, BuildRight Input [4]: [ss_sold_time_sk#81, ss_hdemo_sk#82, ss_store_sk#83, hd_demo_sk#85] Arguments: [ss_sold_time_sk#81, ss_store_sk#83], [ss_sold_time_sk#81, ss_store_sk#83] -(158) Scan parquet spark_catalog.default.time_dim +(158) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#86, t_hour#87, t_minute#88] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q89/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q89/explain.txt index 6789e5b18..143ff0868 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q89/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q89/explain.txt @@ -29,7 +29,7 @@ TakeOrderedAndProject (28) +- CometScan parquet spark_catalog.default.store (14) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -40,7 +40,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3] Condition : (isnotnull(ws_item_sk#1) AND isnotnull(ws_ext_discount_amt#2)) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_manufact_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -71,7 +71,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5] Arguments: [ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5], [ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5] -(9) Scan parquet spark_catalog.default.web_sales +(9) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#7, ws_ext_discount_amt#8, ws_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -83,7 +83,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#7, ws_ext_discount_amt#8, ws_sold_date_sk#9] Condition : isnotnull(ws_item_sk#7) -(11) Scan parquet spark_catalog.default.date_dim +(11) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -181,7 +181,7 @@ BroadcastExchange (35) +- CometScan parquet spark_catalog.default.date_dim (31) -(31) Scan parquet spark_catalog.default.date_dim +(31) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_date#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q93/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q93/explain.txt index 4c672e397..dc64f3c4c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q93/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q93/explain.txt @@ -23,7 +23,7 @@ +- CometScan parquet 
spark_catalog.default.reason (12) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -41,7 +41,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#3, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5], [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#3 ASC NULLS FIRST] -(5) Scan parquet spark_catalog.default.store_returns +(5) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10, sr_returned_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -73,7 +73,7 @@ Arguments: [ss_item_sk#1, ss_ticket_number#3], [sr_item_sk#7, sr_ticket_number#9 Input [9]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10] Arguments: [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, sr_return_quantity#10], [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, sr_return_quantity#10] -(12) Scan parquet spark_catalog.default.reason +(12) CometScan parquet spark_catalog.default.reason Output [2]: [r_reason_sk#12, r_reason_desc#13] Batched: true Location [not included in comparison]/{warehouse_dir}/reason] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q94/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q94/explain.txt index 8f23b9979..4dd9246cd 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q94/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q94/explain.txt @@ -41,7 +41,7 @@ +- CometScan parquet spark_catalog.default.web_site (29) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [8]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ws_sold_date_sk#8] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -64,7 +64,7 @@ Arguments: hashpartitioning(ws_order_number#5, 5), ENSURE_REQUIREMENTS, CometNat Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_order_number#5 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.web_sales +(6) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_warehouse_sk#9, ws_order_number#10, ws_sold_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -91,7 +91,7 @@ Arguments: [ws_order_number#5], [ws_order_number#10], LeftSemi, NOT (ws_warehous Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, 
ws_ext_ship_cost#6, ws_net_profit#7], [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(12) Scan parquet spark_catalog.default.web_returns +(12) CometScan parquet spark_catalog.default.web_returns Output [2]: [wr_order_number#12, wr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -114,7 +114,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [wr_order_number#12] Arguments: [ws_order_number#5], [wr_order_number#12], LeftAnti -(17) Scan parquet spark_catalog.default.date_dim +(17) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Arguments: [ws_ship_date_sk#1], [d_date_sk#14], Inner, BuildRight Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, d_date_sk#14] Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(23) Scan parquet spark_catalog.default.customer_address +(23) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#16, ca_state#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -170,7 +170,7 @@ Arguments: [ws_ship_addr_sk#2], [ca_address_sk#16], Inner, BuildRight Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ca_address_sk#16] Arguments: [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(29) Scan parquet spark_catalog.default.web_site +(29) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#18, web_company_name#19] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q95/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q95/explain.txt index bd41f3194..986abf83c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q95/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q95/explain.txt @@ -54,7 +54,7 @@ +- CometScan parquet spark_catalog.default.web_site (42) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ws_sold_date_sk#7] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -77,7 +77,7 @@ Arguments: hashpartitioning(ws_order_number#4, 5), ENSURE_REQUIREMENTS, CometNat Input [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_order_number#4 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.web_sales +(6) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_warehouse_sk#8, ws_order_number#9, ws_sold_date_sk#10] Batched: true Location [not included in 
comparison]/{warehouse_dir}/web_sales] @@ -121,7 +121,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [ws_order_number#9] Arguments: [ws_order_number#4], [ws_order_number#9], LeftSemi -(16) Scan parquet spark_catalog.default.web_returns +(16) CometScan parquet spark_catalog.default.web_returns Output [2]: [wr_order_number#13, wr_returned_date_sk#14] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -181,7 +181,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [wr_order_number#13] Arguments: [ws_order_number#4], [wr_order_number#13], LeftSemi -(30) Scan parquet spark_catalog.default.date_dim +(30) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#19, d_date#20] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -209,7 +209,7 @@ Arguments: [ws_ship_date_sk#1], [d_date_sk#19], Inner, BuildRight Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, d_date_sk#19] Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] -(36) Scan parquet spark_catalog.default.customer_address +(36) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#21, ca_state#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -237,7 +237,7 @@ Arguments: [ws_ship_addr_sk#2], [ca_address_sk#21], Inner, BuildRight Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ca_address_sk#21] Arguments: [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] -(42) Scan parquet spark_catalog.default.web_site +(42) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#23, web_company_name#24] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q96/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q96/explain.txt index c4b2cf973..d2e63bee2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q96/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q96/explain.txt @@ -26,7 +26,7 @@ +- CometScan parquet spark_catalog.default.store (16) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -41,7 +41,7 @@ Condition : ((isnotnull(ss_hdemo_sk#2) AND isnotnull(ss_sold_time_sk#1)) AND isn Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3] -(4) Scan parquet spark_catalog.default.household_demographics +(4) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#5, hd_dep_count#6] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -69,7 +69,7 @@ Arguments: 
[ss_hdemo_sk#2], [hd_demo_sk#5], Inner, BuildRight Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5] Arguments: [ss_sold_time_sk#1, ss_store_sk#3], [ss_sold_time_sk#1, ss_store_sk#3] -(10) Scan parquet spark_catalog.default.time_dim +(10) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#7, t_hour#8, t_minute#9] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -97,7 +97,7 @@ Arguments: [ss_sold_time_sk#1], [t_time_sk#7], Inner, BuildRight Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#7] Arguments: [ss_store_sk#3], [ss_store_sk#3] -(16) Scan parquet spark_catalog.default.store +(16) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#10, s_store_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/store] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q97/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q97/explain.txt index 2eec0079d..1c6e9b78c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q97/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q97/explain.txt @@ -26,14 +26,14 @@ +- ReusedExchange (13) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_customer_sk#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] PartitionFilters: [isnotnull(ss_sold_date_sk#3), dynamicpruningexpression(ss_sold_date_sk#3 IN dynamicpruning#4)] ReadSchema: struct -(2) Scan parquet spark_catalog.default.date_dim +(2) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -79,7 +79,7 @@ Functions: [] Input [2]: [customer_sk#7, item_sk#8] Arguments: [customer_sk#7, item_sk#8], [customer_sk#7 ASC NULLS FIRST, item_sk#8 ASC NULLS FIRST] -(12) Scan parquet spark_catalog.default.catalog_sales +(12) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#9, cs_item_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -152,7 +152,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(26) Scan parquet spark_catalog.default.date_dim +(26) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q98/explain.txt index 0a9810928..593065ba0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q98/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q98/explain.txt @@ -26,7 +26,7 @@ +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -38,7 +38,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet 
spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -62,7 +62,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -148,7 +148,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(26) Scan parquet spark_catalog.default.date_dim +(26) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q99/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q99/explain.txt index 7935bb4c6..6dfcf8b32 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q99/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark3_5/q99/explain.txt @@ -29,7 +29,7 @@ +- CometScan parquet spark_catalog.default.date_dim (18) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [5]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_warehouse_sk#4, cs_sold_date_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -40,7 +40,7 @@ ReadSchema: struct (avg(ctr_total_ret Output [2]: [ctr_customer_sk#11, ctr_store_sk#12] Input [5]: [ctr_customer_sk#11, ctr_store_sk#12, ctr_total_return#13, (avg(ctr_total_return) * 1.2)#29, ctr_store_sk#22] -(30) Scan parquet spark_catalog.default.store +(30) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#30, s_state#31] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -222,7 +222,7 @@ Join condition: None Output [1]: [ctr_customer_sk#11] Input [3]: [ctr_customer_sk#11, ctr_store_sk#12, s_store_sk#30] -(37) Scan parquet spark_catalog.default.customer +(37) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#32, c_customer_id#33] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -264,7 +264,7 @@ BroadcastExchange (48) +- CometScan parquet spark_catalog.default.date_dim (44) -(44) Scan parquet spark_catalog.default.date_dim +(44) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#6, d_year#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt index 4a29b7260..e32e86f27 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt +++ 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt @@ -46,7 +46,7 @@ TakeOrderedAndProject (45) +- CometScan parquet spark_catalog.default.customer_demographics (36) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -57,14 +57,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -104,7 +104,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -136,7 +136,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -176,7 +176,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_county#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -208,7 +208,7 @@ Join condition: None Output [1]: [c_current_cdemo_sk#4] Input [3]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20] -(36) Scan parquet spark_catalog.default.customer_demographics +(36) CometScan parquet spark_catalog.default.customer_demographics Output [9]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_education_status#25, cd_purchase_estimate#26, cd_credit_rating#27, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -268,7 +268,7 @@ BroadcastExchange (50) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt index 3f5dd24f2..f14158092 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt @@ -73,7 +73,7 @@ TakeOrderedAndProject (72) +- ReusedExchange (62) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, 
c_login#7, c_email_address#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -84,7 +84,7 @@ ReadSchema: struct 0.00)) -(18) Scan parquet spark_catalog.default.customer +(18) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#21, c_customer_id#22, c_first_name#23, c_last_name#24, c_preferred_cust_flag#25, c_birth_country#26, c_login#27, c_email_address#28] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -169,7 +169,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -138,7 +138,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q13/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q13/explain.txt index e137f931f..b4a7b8724 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q13/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q13/explain.txt @@ -34,7 +34,7 @@ +- CometScan parquet spark_catalog.default.household_demographics (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [10]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10] Batched: true Location: InMemoryFileIndex [] @@ -46,7 +46,7 @@ ReadSchema: struct= 100.00) AND (ss_net_profit#9 <= 200.00)) OR ((ss_net_profit#9 >= 150.00) AND (ss_net_profit#9 <= 300.00))) OR ((ss_net_profit#9 >= 50.00) AND (ss_net_profit#9 <= 250.00)))) AND ((((ss_sales_price#6 >= 100.00) AND (ss_sales_price#6 <= 150.00)) OR ((ss_sales_price#6 >= 50.00) AND (ss_sales_price#6 <= 100.00))) OR ((ss_sales_price#6 >= 150.00) AND (ss_sales_price#6 <= 200.00)))) -(3) Scan parquet spark_catalog.default.store +(3) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -70,7 +70,7 @@ Arguments: [ss_store_sk#4], [s_store_sk#12], Inner, BuildRight Input [11]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, 
ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10, s_store_sk#12] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -98,7 +98,7 @@ Arguments: [ss_addr_sk#3], [ca_address_sk#13], Inner, ((((ca_state#14 IN (TX,OH) Input [11]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10, ca_address_sk#13, ca_state#14] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -126,7 +126,7 @@ Arguments: [ss_sold_date_sk#10], [d_date_sk#16], Inner, BuildRight Input [8]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10, d_date_sk#16] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8] -(20) Scan parquet spark_catalog.default.customer_demographics +(20) CometScan parquet spark_catalog.default.customer_demographics Output [3]: [cd_demo_sk#18, cd_marital_status#19, cd_education_status#20] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -150,7 +150,7 @@ Arguments: [ss_cdemo_sk#1], [cd_demo_sk#18], Inner, ((((((cd_marital_status#19 = Input [9]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_demo_sk#18, cd_marital_status#19, cd_education_status#20] Arguments: [ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_marital_status#19, cd_education_status#20], [ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_marital_status#19, cd_education_status#20] -(25) Scan parquet spark_catalog.default.household_demographics +(25) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#21, hd_dep_count#22] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -205,7 +205,7 @@ BroadcastExchange (38) +- CometScan parquet spark_catalog.default.date_dim (34) -(34) Scan parquet spark_catalog.default.date_dim +(34) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14a/explain.txt index e8d67e286..9649f8b07 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14a/explain.txt @@ -105,7 +105,7 @@ TakeOrderedAndProject (104) +- ReusedExchange (90) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -117,7 +117,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -128,7 +128,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -151,7 +151,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -187,7 +187,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -263,7 +263,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -330,7 +330,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -362,7 +362,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, 
ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -419,7 +419,7 @@ Condition : (isnotnull(sales#54) AND (cast(sales#54 as decimal(32,6)) > cast(Sub Output [6]: [sales#54, number_sales#55, store AS channel#58, i_brand_id#40 AS i_brand_id#59, i_class_id#41 AS i_class_id#60, i_category_id#42 AS i_category_id#61] Input [5]: [i_brand_id#40, i_class_id#41, i_category_id#42, sales#54, number_sales#55] -(67) Scan parquet spark_catalog.default.catalog_sales +(67) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#62, cs_quantity#63, cs_list_price#64, cs_sold_date_sk#65] Batched: true Location: InMemoryFileIndex [] @@ -492,7 +492,7 @@ Condition : (isnotnull(sales#81) AND (cast(sales#81 as decimal(32,6)) > cast(Reu Output [6]: [sales#81, number_sales#82, catalog AS channel#83, i_brand_id#69, i_class_id#70, i_category_id#71] Input [5]: [i_brand_id#69, i_class_id#70, i_category_id#71, sales#81, number_sales#82] -(83) Scan parquet spark_catalog.default.web_sales +(83) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#84, ws_quantity#85, ws_list_price#86, ws_sold_date_sk#87] Batched: true Location: InMemoryFileIndex [] @@ -615,7 +615,7 @@ Subquery:1 Hosting operator id = 65 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (114) -(105) Scan parquet spark_catalog.default.store_sales +(105) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#121, ss_list_price#122, ss_sold_date_sk#123] Batched: true Location: InMemoryFileIndex [] @@ -634,7 +634,7 @@ Arguments: [ss_sold_date_sk#123], [d_date_sk#125], Inner, BuildRight Input [4]: [ss_quantity#121, ss_list_price#122, ss_sold_date_sk#123, d_date_sk#125] Arguments: [quantity#126, list_price#127], [ss_quantity#121 AS quantity#126, ss_list_price#122 AS list_price#127] -(109) Scan parquet spark_catalog.default.catalog_sales +(109) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#128, cs_list_price#129, cs_sold_date_sk#130] Batched: true Location: InMemoryFileIndex [] @@ -653,7 +653,7 @@ Arguments: [cs_sold_date_sk#130], [d_date_sk#132], Inner, BuildRight Input [4]: [cs_quantity#128, cs_list_price#129, cs_sold_date_sk#130, d_date_sk#132] Arguments: [quantity#133, list_price#134], [cs_quantity#128 AS quantity#133, cs_list_price#129 AS list_price#134] -(113) Scan parquet spark_catalog.default.web_sales +(113) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#135, ws_list_price#136, ws_sold_date_sk#137] Batched: true Location: InMemoryFileIndex [] @@ -712,7 +712,7 @@ BroadcastExchange (126) +- CometScan parquet spark_catalog.default.date_dim (122) -(122) Scan parquet spark_catalog.default.date_dim +(122) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -742,7 +742,7 @@ BroadcastExchange (131) +- CometScan parquet spark_catalog.default.date_dim (127) -(127) Scan parquet spark_catalog.default.date_dim +(127) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#148] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14b/explain.txt index bbdb4144f..e4a215cd2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q14b/explain.txt @@ -87,7 +87,7 @@ TakeOrderedAndProject (86) +- CometScan parquet spark_catalog.default.date_dim (73) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -99,7 +99,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -110,7 +110,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -133,7 +133,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -169,7 +169,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -245,7 +245,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -312,7 +312,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -344,7 +344,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, 
i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -397,7 +397,7 @@ Results [6]: [store AS channel#55, i_brand_id#40, i_class_id#41, i_category_id#4 Input [6]: [channel#55, i_brand_id#40, i_class_id#41, i_category_id#42, sales#56, number_sales#57] Condition : (isnotnull(sales#56) AND (cast(sales#56 as decimal(32,6)) > cast(Subquery scalar-subquery#58, [id=#59] as decimal(32,6)))) -(66) Scan parquet spark_catalog.default.store_sales +(66) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#60, ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63] Batched: true Location: InMemoryFileIndex [] @@ -429,7 +429,7 @@ Arguments: [ss_item_sk#60], [i_item_sk#66], Inner, BuildRight Input [8]: [ss_item_sk#60, ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63, i_item_sk#66, i_brand_id#67, i_class_id#68, i_category_id#69] Arguments: [ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63, i_brand_id#67, i_class_id#68, i_category_id#69], [ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63, i_brand_id#67, i_class_id#68, i_category_id#69] -(73) Scan parquet spark_catalog.default.date_dim +(73) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#70, d_week_seq#71] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -518,7 +518,7 @@ Subquery:1 Hosting operator id = 65 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (96) -(87) Scan parquet spark_catalog.default.store_sales +(87) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#85, ss_list_price#86, ss_sold_date_sk#87] Batched: true Location: InMemoryFileIndex [] @@ -537,7 +537,7 @@ Arguments: [ss_sold_date_sk#87], [d_date_sk#89], Inner, BuildRight Input [4]: [ss_quantity#85, ss_list_price#86, ss_sold_date_sk#87, d_date_sk#89] Arguments: [quantity#90, list_price#91], [ss_quantity#85 AS quantity#90, ss_list_price#86 AS list_price#91] -(91) Scan parquet spark_catalog.default.catalog_sales +(91) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#92, cs_list_price#93, cs_sold_date_sk#94] Batched: true Location: InMemoryFileIndex [] @@ -556,7 +556,7 @@ Arguments: [cs_sold_date_sk#94], [d_date_sk#96], Inner, BuildRight Input [4]: [cs_quantity#92, cs_list_price#93, cs_sold_date_sk#94, d_date_sk#96] Arguments: [quantity#97, list_price#98], [cs_quantity#92 AS quantity#97, cs_list_price#93 AS list_price#98] -(95) Scan parquet spark_catalog.default.web_sales +(95) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#99, ws_list_price#100, ws_sold_date_sk#101] Batched: true Location: InMemoryFileIndex [] @@ -615,7 +615,7 @@ BroadcastExchange (108) +- CometScan parquet spark_catalog.default.date_dim (104) -(104) Scan parquet spark_catalog.default.date_dim +(104) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -646,7 +646,7 @@ Subquery:7 Hosting operator id = 104 Hosting Expression = Subquery scalar-subque +- CometScan parquet spark_catalog.default.date_dim (109) -(109) Scan parquet spark_catalog.default.date_dim +(109) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#112, 
d_year#113, d_moy#114, d_dom#115] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -672,7 +672,7 @@ BroadcastExchange (117) +- CometScan parquet spark_catalog.default.date_dim (113) -(113) Scan parquet spark_catalog.default.date_dim +(113) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#116] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -712,7 +712,7 @@ BroadcastExchange (122) +- CometScan parquet spark_catalog.default.date_dim (118) -(118) Scan parquet spark_catalog.default.date_dim +(118) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#70, d_week_seq#71] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -743,7 +743,7 @@ Subquery:16 Hosting operator id = 118 Hosting Expression = Subquery scalar-subqu +- CometScan parquet spark_catalog.default.date_dim (123) -(123) Scan parquet spark_catalog.default.date_dim +(123) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#117, d_year#118, d_moy#119, d_dom#120] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q15/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q15/explain.txt index a22a52dc6..fe3c7a681 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q15/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q15/explain.txt @@ -24,7 +24,7 @@ TakeOrderedAndProject (23) +- CometScan parquet spark_catalog.default.date_dim (13) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -36,7 +36,7 @@ ReadSchema: struct Input [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_bill_customer_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#5, c_current_addr_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -60,7 +60,7 @@ Arguments: [cs_bill_customer_sk#1], [c_customer_sk#5], Inner, BuildRight Input [5]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3, c_customer_sk#5, c_current_addr_sk#6] Arguments: [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6], [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#7, ca_state#8, ca_zip#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -84,7 +84,7 @@ Arguments: [c_current_addr_sk#6], [ca_address_sk#7], Inner, ((substr(ca_zip#9, 1 Input [6]: [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6, ca_address_sk#7, ca_state#8, ca_zip#9] Arguments: [cs_sales_price#2, cs_sold_date_sk#3, ca_zip#9], [cs_sales_price#2, cs_sold_date_sk#3, ca_zip#9] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#11, d_qoy#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -147,7 +147,7 @@ BroadcastExchange (28) +- CometScan parquet spark_catalog.default.date_dim (24) 
-(24) Scan parquet spark_catalog.default.date_dim +(24) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#11, d_qoy#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q16/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q16/explain.txt index ff0ac2342..1ff243ff2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q16/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q16/explain.txt @@ -41,7 +41,7 @@ +- CometScan parquet spark_catalog.default.call_center (29) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [8]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, cs_sold_date_sk#8] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -64,7 +64,7 @@ Arguments: hashpartitioning(cs_order_number#5, 5), ENSURE_REQUIREMENTS, CometNat Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] Arguments: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_order_number#5 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.catalog_sales +(6) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_warehouse_sk#9, cs_order_number#10, cs_sold_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -91,7 +91,7 @@ Arguments: [cs_order_number#5], [cs_order_number#10], LeftSemi, NOT (cs_warehous Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] Arguments: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(12) Scan parquet spark_catalog.default.catalog_returns +(12) CometScan parquet spark_catalog.default.catalog_returns Output [2]: [cr_order_number#12, cr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -114,7 +114,7 @@ Left output [6]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_ Right output [1]: [cr_order_number#12] Arguments: [cs_order_number#5], [cr_order_number#12], LeftAnti -(17) Scan parquet spark_catalog.default.date_dim +(17) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Arguments: [cs_ship_date_sk#1], [d_date_sk#14], Inner, BuildRight Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, d_date_sk#14] Arguments: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(23) Scan parquet spark_catalog.default.customer_address +(23) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#16, 
ca_state#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -170,7 +170,7 @@ Arguments: [cs_ship_addr_sk#2], [ca_address_sk#16], Inner, BuildRight Input [6]: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, ca_address_sk#16] Arguments: [cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(29) Scan parquet spark_catalog.default.call_center +(29) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#18, cc_county#19] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt index d3739ed75..dab3dfbf3 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- CometScan parquet spark_catalog.default.item (33) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_quantity#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [2]: [ws_ext_sales_price#1, ws_sold_date_sk#2] Arguments: [sold_date_sk#3, sales_price#4], [ws_sold_date_sk#2 AS sold_date_sk#3, ws_ext_sales_price#1 AS sales_price#4] -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ext_sales_price#5, cs_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -63,7 +63,7 @@ Arguments: [sold_date_sk#7, sales_price#8], [cs_sold_date_sk#6 AS sold_date_sk#7 Child 0 Input [2]: [sold_date_sk#3, sales_price#4] Child 1 Input [2]: [sold_date_sk#7, sales_price#8] -(6) Scan parquet spark_catalog.default.date_dim +(6) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_week_seq#10, d_day_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday ) THEN sal Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday ) THEN sales_price#4 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday ) THEN sales_price#4 END))#27, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday ) THEN sales_price#4 END))#28, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END))#29, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END))#30, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday ) THEN sales_price#4 END))#31, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))#32] Results [8]: [d_week_seq#10, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday ) THEN sales_price#4 END))#26,17,2) AS sun_sales#33, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday ) THEN sales_price#4 END))#27,17,2) AS mon_sales#34, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday ) THEN sales_price#4 END))#28,17,2) AS tue_sales#35, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN 
sales_price#4 END))#29,17,2) AS wed_sales#36, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END))#30,17,2) AS thu_sales#37, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday ) THEN sales_price#4 END))#31,17,2) AS fri_sales#38, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))#32,17,2) AS sat_sales#39] -(15) Scan parquet spark_catalog.default.date_dim +(15) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_week_seq#40, d_year#41] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -150,7 +150,7 @@ Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#58 = Sunday ) THEN sal Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#58 = Sunday ) THEN sales_price#59 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#58 = Monday ) THEN sales_price#59 END))#27, sum(UnscaledValue(CASE WHEN (d_day_name#58 = Tuesday ) THEN sales_price#59 END))#28, sum(UnscaledValue(CASE WHEN (d_day_name#58 = Wednesday) THEN sales_price#59 END))#29, sum(UnscaledValue(CASE WHEN (d_day_name#58 = Thursday ) THEN sales_price#59 END))#30, sum(UnscaledValue(CASE WHEN (d_day_name#58 = Friday ) THEN sales_price#59 END))#31, sum(UnscaledValue(CASE WHEN (d_day_name#58 = Saturday ) THEN sales_price#59 END))#32] Results [8]: [d_week_seq#50, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#58 = Sunday ) THEN sales_price#59 END))#26,17,2) AS sun_sales#60, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#58 = Monday ) THEN sales_price#59 END))#27,17,2) AS mon_sales#61, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#58 = Tuesday ) THEN sales_price#59 END))#28,17,2) AS tue_sales#62, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#58 = Wednesday) THEN sales_price#59 END))#29,17,2) AS wed_sales#63, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#58 = Thursday ) THEN sales_price#59 END))#30,17,2) AS thu_sales#64, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#58 = Friday ) THEN sales_price#59 END))#31,17,2) AS fri_sales#65, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#58 = Saturday ) THEN sales_price#59 END))#32,17,2) AS sat_sales#66] -(24) Scan parquet spark_catalog.default.date_dim +(24) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_week_seq#67, d_year#68] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt index 4bb7f5ca5..9eb32de68 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt @@ -23,7 +23,7 @@ TakeOrderedAndProject (22) +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -35,7 +35,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in 
comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -138,7 +138,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q21/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q21/explain.txt index de5cc9519..80959f06f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q21/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q21/explain.txt @@ -25,7 +25,7 @@ TakeOrderedAndProject (24) +- CometScan parquet spark_catalog.default.date_dim (14) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct -(2) Scan parquet spark_catalog.default.store_sales +(2) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -86,7 +86,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#7, ss_sold_date_sk#8] Condition : isnotnull(ss_item_sk#7) -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -114,7 +114,7 @@ Arguments: [ss_sold_date_sk#8], [d_date_sk#10], Inner, BuildRight Input [4]: [ss_item_sk#7, ss_sold_date_sk#8, d_date_sk#10, d_date#11] Arguments: [ss_item_sk#7, d_date#11], [ss_item_sk#7, d_date#11] -(10) Scan parquet spark_catalog.default.item +(10) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#13, i_item_desc#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -184,7 +184,7 @@ Arguments: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_ (25) ColumnarToRow [codegen id : 1] Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] -(26) Scan parquet spark_catalog.default.store_sales +(26) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -199,7 +199,7 @@ Condition : isnotnull(ss_customer_sk#19) Input [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Arguments: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21], 
[ss_customer_sk#19, ss_quantity#20, ss_sales_price#21] -(29) Scan parquet spark_catalog.default.customer +(29) CometScan parquet spark_catalog.default.customer Output [1]: [c_customer_sk#23] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -279,7 +279,7 @@ Join condition: None Output [1]: [(cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4) AS sales#33] Input [4]: [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, d_date_sk#32] -(46) Scan parquet spark_catalog.default.web_sales +(46) CometScan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#34, ws_bill_customer_sk#35, ws_quantity#36, ws_list_price#37, ws_sold_date_sk#38] Batched: true Location: InMemoryFileIndex [] @@ -384,7 +384,7 @@ BroadcastExchange (71) +- CometScan parquet spark_catalog.default.date_dim (67) -(67) Scan parquet spark_catalog.default.date_dim +(67) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#32, d_year#55, d_moy#56] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -414,7 +414,7 @@ BroadcastExchange (76) +- CometScan parquet spark_catalog.default.date_dim (72) -(72) Scan parquet spark_catalog.default.date_dim +(72) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -457,7 +457,7 @@ Subquery:3 Hosting operator id = 38 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (82) -(77) Scan parquet spark_catalog.default.store_sales +(77) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#57, ss_quantity#58, ss_sales_price#59, ss_sold_date_sk#60] Batched: true Location: InMemoryFileIndex [] @@ -481,7 +481,7 @@ Arguments: [ss_customer_sk#57], [c_customer_sk#62], Inner, BuildRight Input [5]: [ss_customer_sk#57, ss_quantity#58, ss_sales_price#59, ss_sold_date_sk#60, c_customer_sk#62] Arguments: [ss_quantity#58, ss_sales_price#59, ss_sold_date_sk#60, c_customer_sk#62], [ss_quantity#58, ss_sales_price#59, ss_sold_date_sk#60, c_customer_sk#62] -(82) Scan parquet spark_catalog.default.date_dim +(82) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#63, d_year#64] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -556,7 +556,7 @@ BroadcastExchange (99) +- CometScan parquet spark_catalog.default.date_dim (95) -(95) Scan parquet spark_catalog.default.date_dim +(95) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#63, d_year#64] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt index 4e2bbdace..56684d343 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt @@ -88,7 +88,7 @@ TakeOrderedAndProject (87) +- ReusedExchange (80) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [5]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -100,7 +100,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#7, ss_sold_date_sk#8] Condition : 
isnotnull(ss_item_sk#7) -(5) Scan parquet spark_catalog.default.date_dim +(5) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -140,7 +140,7 @@ Arguments: [ss_sold_date_sk#8], [d_date_sk#10], Inner, BuildRight Input [4]: [ss_item_sk#7, ss_sold_date_sk#8, d_date_sk#10, d_date#11] Arguments: [ss_item_sk#7, d_date#11], [ss_item_sk#7, d_date#11] -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#13, i_item_desc#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -210,7 +210,7 @@ Arguments: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_ (26) ColumnarToRow [codegen id : 1] Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] -(27) Scan parquet spark_catalog.default.store_sales +(27) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -225,7 +225,7 @@ Condition : isnotnull(ss_customer_sk#19) Input [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Arguments: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21], [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21] -(30) Scan parquet spark_catalog.default.customer +(30) CometScan parquet spark_catalog.default.customer Output [1]: [c_customer_sk#23] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -288,7 +288,7 @@ Right keys [1]: [c_customer_sk#23] Join type: LeftSemi Join condition: None -(43) Scan parquet spark_catalog.default.customer +(43) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#32, c_first_name#33, c_last_name#34] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -383,7 +383,7 @@ Functions [1]: [sum((cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4))] Aggregate Attributes [1]: [sum((cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4))#40] Results [3]: [c_last_name#34, c_first_name#33, sum((cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4))#40 AS sales#41] -(63) Scan parquet spark_catalog.default.web_sales +(63) CometScan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#42, ws_bill_customer_sk#43, ws_quantity#44, ws_list_price#45, ws_sold_date_sk#46] Batched: true Location: InMemoryFileIndex [] @@ -506,7 +506,7 @@ BroadcastExchange (92) +- CometScan parquet spark_catalog.default.date_dim (88) -(88) Scan parquet spark_catalog.default.date_dim +(88) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#35, d_year#65, d_moy#66] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -536,7 +536,7 @@ BroadcastExchange (97) +- CometScan parquet spark_catalog.default.date_dim (93) -(93) Scan parquet spark_catalog.default.date_dim +(93) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -579,7 +579,7 @@ Subquery:3 Hosting operator id = 39 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (103) -(98) Scan parquet spark_catalog.default.store_sales +(98) CometScan parquet spark_catalog.default.store_sales Output [4]: 
[ss_customer_sk#67, ss_quantity#68, ss_sales_price#69, ss_sold_date_sk#70] Batched: true Location: InMemoryFileIndex [] @@ -603,7 +603,7 @@ Arguments: [ss_customer_sk#67], [c_customer_sk#72], Inner, BuildRight Input [5]: [ss_customer_sk#67, ss_quantity#68, ss_sales_price#69, ss_sold_date_sk#70, c_customer_sk#72] Arguments: [ss_quantity#68, ss_sales_price#69, ss_sold_date_sk#70, c_customer_sk#72], [ss_quantity#68, ss_sales_price#69, ss_sold_date_sk#70, c_customer_sk#72] -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#73, d_year#74] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -678,7 +678,7 @@ BroadcastExchange (120) +- CometScan parquet spark_catalog.default.date_dim (116) -(116) Scan parquet spark_catalog.default.date_dim +(116) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#73, d_year#74] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt index 3094c4e07..9dbf4af83 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.customer_address (30) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -66,7 +66,7 @@ Arguments: hashpartitioning(ss_ticket_number#4, ss_item_sk#1, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5], [ss_ticket_number#4 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.store_returns +(6) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#7, sr_ticket_number#8, sr_returned_date_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -98,7 +98,7 @@ Arguments: [ss_ticket_number#4, ss_item_sk#1], [sr_ticket_number#8, sr_item_sk#7 Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, sr_item_sk#7, sr_ticket_number#8] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5], [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [5]: [s_store_sk#10, s_store_name#11, s_market_id#12, s_state#13, s_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -126,7 +126,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet 
spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -150,7 +150,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#15], Inner, BuildRight Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Arguments: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20], [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] -(24) Scan parquet spark_catalog.default.customer +(24) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#21, c_first_name#22, c_last_name#23, c_birth_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -177,7 +177,7 @@ Arguments: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_pric (29) ColumnarToRow [codegen id : 2] Input [12]: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20, c_first_name#22, c_last_name#23, c_birth_country#24] -(30) Scan parquet spark_catalog.default.customer_address +(30) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_state#25, ca_zip#26, ca_country#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -312,7 +312,7 @@ Arguments: [ss_store_sk#42], [s_store_sk#47], Inner, BuildRight Input [8]: [ss_item_sk#40, ss_customer_sk#41, ss_store_sk#42, ss_net_paid#44, s_store_sk#47, s_store_name#48, s_state#49, s_zip#50] Arguments: [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50], [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50] -(52) Scan parquet spark_catalog.default.item +(52) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#51, i_current_price#52, i_size#53, i_color#54, i_units#55, i_manager_id#56] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt index 846c7a14a..f27ae4019 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.customer_address (30) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -66,7 +66,7 @@ Arguments: hashpartitioning(ss_ticket_number#4, ss_item_sk#1, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5], [ss_ticket_number#4 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.store_returns +(6) CometScan parquet 
spark_catalog.default.store_returns Output [3]: [sr_item_sk#7, sr_ticket_number#8, sr_returned_date_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -98,7 +98,7 @@ Arguments: [ss_ticket_number#4, ss_item_sk#1], [sr_ticket_number#8, sr_item_sk#7 Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, sr_item_sk#7, sr_ticket_number#8] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5], [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [5]: [s_store_sk#10, s_store_name#11, s_market_id#12, s_state#13, s_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -126,7 +126,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -150,7 +150,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#15], Inner, BuildRight Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Arguments: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20], [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] -(24) Scan parquet spark_catalog.default.customer +(24) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#21, c_first_name#22, c_last_name#23, c_birth_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -177,7 +177,7 @@ Arguments: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_pric (29) ColumnarToRow [codegen id : 2] Input [12]: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20, c_first_name#22, c_last_name#23, c_birth_country#24] -(30) Scan parquet spark_catalog.default.customer_address +(30) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_state#25, ca_zip#26, ca_country#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -312,7 +312,7 @@ Arguments: [ss_store_sk#42], [s_store_sk#47], Inner, BuildRight Input [8]: [ss_item_sk#40, ss_customer_sk#41, ss_store_sk#42, ss_net_paid#44, s_store_sk#47, s_store_name#48, s_state#49, s_zip#50] Arguments: [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50], [ss_item_sk#40, ss_customer_sk#41, ss_net_paid#44, s_store_name#48, s_state#49, s_zip#50] -(52) Scan parquet spark_catalog.default.item +(52) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#51, i_current_price#52, i_size#53, i_color#54, i_units#55, i_manager_id#56] Batched: true Location [not included in 
comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt index 0ea8e3ef4..35ca89139 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- CometScan parquet spark_catalog.default.item (33) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_profit#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct (avg(ctr_total_ret Output [2]: [ctr_customer_sk#13, ctr_total_return#15] Input [5]: [ctr_customer_sk#13, ctr_state#14, ctr_total_return#15, (avg(ctr_total_return) * 1.2)#33, ctr_state#26] -(38) Scan parquet spark_catalog.default.customer +(38) CometScan parquet spark_catalog.default.customer Output [14]: [c_customer_sk#34, c_customer_id#35, c_current_addr_sk#36, c_salutation#37, c_first_name#38, c_last_name#39, c_preferred_cust_flag#40, c_birth_day#41, c_birth_month#42, c_birth_year#43, c_birth_country#44, c_login#45, c_email_address#46, c_last_review_date#47] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -262,7 +262,7 @@ Join condition: None Output [14]: [ctr_total_return#15, c_customer_id#35, c_current_addr_sk#36, c_salutation#37, c_first_name#38, c_last_name#39, c_preferred_cust_flag#40, c_birth_day#41, c_birth_month#42, c_birth_year#43, c_birth_country#44, c_login#45, c_email_address#46, c_last_review_date#47] Input [16]: [ctr_customer_sk#13, ctr_total_return#15, c_customer_sk#34, c_customer_id#35, c_current_addr_sk#36, c_salutation#37, c_first_name#38, c_last_name#39, c_preferred_cust_flag#40, c_birth_day#41, c_birth_month#42, c_birth_year#43, c_birth_country#44, c_login#45, c_email_address#46, c_last_review_date#47] -(44) Scan parquet spark_catalog.default.customer_address +(44) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#48, ca_state#49] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -308,7 +308,7 @@ BroadcastExchange (56) +- CometScan parquet spark_catalog.default.date_dim (52) -(52) Scan parquet spark_catalog.default.date_dim +(52) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#6, d_year#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q31/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q31/explain.txt index e502bf69f..ed4a6da89 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q31/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q31/explain.txt @@ -97,7 +97,7 @@ +- ReusedExchange (84) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -109,7 +109,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_addr_sk#1) -(3) Scan 
parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -133,7 +133,7 @@ Arguments: [ss_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [6]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, d_date_sk#5, d_year#6, d_qoy#7] Arguments: [ss_addr_sk#1, ss_ext_sales_price#2, d_year#6, d_qoy#7], [ss_addr_sk#1, ss_ext_sales_price#2, d_year#6, d_qoy#7] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#8, ca_county#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -178,7 +178,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#2))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#2))#12] Results [3]: [ca_county#9, d_year#6, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#2))#12,17,2) AS store_sales#13] -(17) Scan parquet spark_catalog.default.store_sales +(17) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#14, ss_ext_sales_price#15, ss_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -190,7 +190,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#14, ss_ext_sales_price#15, ss_sold_date_sk#16] Condition : isnotnull(ss_addr_sk#14) -(19) Scan parquet spark_catalog.default.date_dim +(19) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#18, d_year#19, d_qoy#20] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -257,7 +257,7 @@ Right keys [1]: [ca_county#22] Join type: Inner Join condition: None -(33) Scan parquet spark_catalog.default.store_sales +(33) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#26, ss_ext_sales_price#27, ss_sold_date_sk#28] Batched: true Location: InMemoryFileIndex [] @@ -269,7 +269,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#26, ss_ext_sales_price#27, ss_sold_date_sk#28] Condition : isnotnull(ss_addr_sk#26) -(35) Scan parquet spark_catalog.default.date_dim +(35) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#30, d_year#31, d_qoy#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -340,7 +340,7 @@ Join condition: None Output [5]: [ca_county#9, d_year#6, store_sales#13, store_sales#25, store_sales#37] Input [7]: [ca_county#9, d_year#6, store_sales#13, ca_county#22, store_sales#25, ca_county#34, store_sales#37] -(50) Scan parquet spark_catalog.default.web_sales +(50) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#38, ws_ext_sales_price#39, ws_sold_date_sk#40] Batched: true Location: InMemoryFileIndex [] @@ -407,7 +407,7 @@ Right keys [1]: [ca_county#46] Join type: Inner Join condition: None -(64) Scan parquet spark_catalog.default.web_sales +(64) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#51, ws_ext_sales_price#52, ws_sold_date_sk#53] Batched: true Location: InMemoryFileIndex [] @@ -478,7 +478,7 @@ Join condition: (CASE WHEN (web_sales#50 > 0.00) THEN (web_sales#62 / web_sales# Output [8]: [ca_county#9, d_year#6, store_sales#13, store_sales#25, store_sales#37, ca_county#46, web_sales#50, web_sales#62] Input [9]: [ca_county#9, d_year#6, store_sales#13, store_sales#25, store_sales#37, ca_county#46, web_sales#50, ca_county#59, web_sales#62] -(79) Scan parquet spark_catalog.default.web_sales +(79) CometScan 
parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#63, ws_ext_sales_price#64, ws_sold_date_sk#65] Batched: true Location: InMemoryFileIndex [] @@ -569,7 +569,7 @@ BroadcastExchange (100) +- CometScan parquet spark_catalog.default.date_dim (97) -(97) Scan parquet spark_catalog.default.date_dim +(97) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -594,7 +594,7 @@ BroadcastExchange (104) +- CometScan parquet spark_catalog.default.date_dim (101) -(101) Scan parquet spark_catalog.default.date_dim +(101) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#18, d_year#19, d_qoy#20] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -619,7 +619,7 @@ BroadcastExchange (108) +- CometScan parquet spark_catalog.default.date_dim (105) -(105) Scan parquet spark_catalog.default.date_dim +(105) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#30, d_year#31, d_qoy#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q32/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q32/explain.txt index fe9e7d49a..8fe97f412 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q32/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q32/explain.txt @@ -32,7 +32,7 @@ +- ReusedExchange (26) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -44,7 +44,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] Condition : (isnotnull(cs_item_sk#1) AND isnotnull(cs_ext_discount_amt#2)) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_manufact_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -75,7 +75,7 @@ Arguments: [cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5], [cs_ext_disc (9) ColumnarToRow [codegen id : 4] Input [3]: [cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5] -(10) Scan parquet spark_catalog.default.catalog_sales +(10) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#7, cs_ext_discount_amt#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -87,7 +87,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#7, cs_ext_discount_amt#8, cs_sold_date_sk#9] Condition : isnotnull(cs_item_sk#7) -(12) Scan parquet spark_catalog.default.date_dim +(12) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -195,7 +195,7 @@ BroadcastExchange (36) +- CometScan parquet spark_catalog.default.date_dim (32) -(32) Scan parquet spark_catalog.default.date_dim +(32) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#19, d_date#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt 
index 4c00423c0..2558133de 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt @@ -64,7 +64,7 @@ TakeOrderedAndProject (63) +- ReusedExchange (52) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -76,7 +76,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_manufact_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_category#13, i_manufact_id#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -201,7 +201,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#3))#17] Results [2]: [i_manufact_id#12, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#3))#17,17,2) AS total_sales#18] -(29) Scan parquet spark_catalog.default.catalog_sales +(29) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#19, cs_item_sk#20, cs_ext_sales_price#21, cs_sold_date_sk#22] Batched: true Location: InMemoryFileIndex [] @@ -270,7 +270,7 @@ Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#21))] Aggregate Attributes [1]: [sum(UnscaledValue(cs_ext_sales_price#21))#30] Results [2]: [i_manufact_id#27, MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#21))#30,17,2) AS total_sales#31] -(44) Scan parquet spark_catalog.default.web_sales +(44) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#32, ws_bill_addr_sk#33, ws_ext_sales_price#34, ws_sold_date_sk#35] Batched: true Location: InMemoryFileIndex [] @@ -373,7 +373,7 @@ BroadcastExchange (68) +- CometScan parquet spark_catalog.default.date_dim (64) -(64) Scan parquet spark_catalog.default.date_dim +(64) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt index 7cfddd6a7..3223f7c72 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -45,7 +45,7 @@ ReadSchema: struct= 15) AND (cnt#17 <= 20)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, 
d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt index c06c1dd16..c1e19555c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt @@ -45,7 +45,7 @@ TakeOrderedAndProject (44) +- CometScan parquet spark_catalog.default.customer_demographics (35) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -56,14 +56,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -175,7 +175,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#21] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20, ca_state#21] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt index 1fdba8689..2478e2e0e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt +++ 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt @@ -30,7 +30,7 @@ TakeOrderedAndProject (29) +- CometScan parquet spark_catalog.default.store (14) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -42,7 +42,7 @@ ReadSchema: struct= 68.00)) A Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(4) Scan parquet spark_catalog.default.inventory +(4) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -71,7 +71,7 @@ Arguments: [i_item_sk#1], [inv_item_sk#6], Inner, BuildRight Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item_sk#6, inv_date_sk#8] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(17) Scan parquet spark_catalog.default.catalog_sales +(17) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_item_sk#12, cs_sold_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -158,7 +158,7 @@ BroadcastExchange (31) +- CometScan parquet spark_catalog.default.date_dim (27) -(27) Scan parquet spark_catalog.default.date_dim +(27) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt index 598dccaf0..c0e7300df 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt @@ -48,7 +48,7 @@ +- ReusedExchange (35) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ ReadSchema: struct Input [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Condition : isnotnull(ss_customer_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -88,7 +88,7 @@ Arguments: [ss_sold_date_sk#2], [d_date_sk#4], Inner, BuildRight Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5] Arguments: [ss_customer_sk#1, d_date#5], [ss_customer_sk#1, 
d_date#5] -(9) Scan parquet spark_catalog.default.customer +(9) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -126,7 +126,7 @@ Input [3]: [c_last_name#9, c_first_name#8, d_date#5] Keys [3]: [c_last_name#9, c_first_name#8, d_date#5] Functions: [] -(17) Scan parquet spark_catalog.default.catalog_sales +(17) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_bill_customer_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -185,7 +185,7 @@ Left output [3]: [c_last_name#9, c_first_name#8, d_date#5] Right output [3]: [c_last_name#17, c_first_name#16, d_date#14] Arguments: [coalesce(c_last_name#9, ), isnull(c_last_name#9), coalesce(c_first_name#8, ), isnull(c_first_name#8), coalesce(d_date#5, 1970-01-01), isnull(d_date#5)], [coalesce(c_last_name#17, ), isnull(c_last_name#17), coalesce(c_first_name#16, ), isnull(c_first_name#16), coalesce(d_date#14, 1970-01-01), isnull(d_date#14)], LeftSemi, BuildRight -(30) Scan parquet spark_catalog.default.web_sales +(30) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#18, ws_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -274,7 +274,7 @@ BroadcastExchange (52) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q39a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q39a/explain.txt index ac784651c..683619b82 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q39a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q39a/explain.txt @@ -50,7 +50,7 @@ +- CometScan parquet spark_catalog.default.date_dim (33) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -62,7 +62,7 @@ ReadSchema: struct 0.000000)) -(18) Scan parquet spark_catalog.default.customer +(18) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#25, c_customer_id#26, c_first_name#27, c_last_name#28, c_preferred_cust_flag#29, c_birth_country#30, c_login#31, c_email_address#32] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -203,7 +203,7 @@ ReadSchema: struct 0.000000) THEN (year_total#100 / yea Output [10]: [customer_id#23, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#77, year_total#100] Input [13]: [customer_id#23, year_total#24, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#53, year_total#77, customer_id#99, year_total#100] -(71) Scan parquet spark_catalog.default.customer +(71) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#101, c_customer_id#102, c_first_name#103, c_last_name#104, 
c_preferred_cust_flag#105, c_birth_country#106, c_login#107, c_email_address#108] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -464,7 +464,7 @@ ReadSchema: struct= 738)) AND (i_m Input [3]: [i_manufact_id#1, i_manufact#2, i_product_name#3] Arguments: [i_manufact#2, i_product_name#3], [i_manufact#2, i_product_name#3] -(4) Scan parquet spark_catalog.default.item +(4) CometScan parquet spark_catalog.default.item Output [5]: [i_category#4, i_manufact#5, i_size#6, i_color#7, i_units#8] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q42/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q42/explain.txt index a43b8716f..724b1e884 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q42/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q42/explain.txt @@ -20,7 +20,7 @@ TakeOrderedAndProject (19) +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1, d_year#2], [d_date_sk#1, d_year#2] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_year#2, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [d_year#2, ss_item_sk#4, ss_ext_sales_price#5], [d_year#2, ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_category_id#8, i_category#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q43/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q43/explain.txt index abe134d26..016788b64 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q43/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q43/explain.txt @@ -20,7 +20,7 @@ TakeOrderedAndProject (19) +- CometScan parquet spark_catalog.default.store (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_day_name#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((isnotnull(d_year#2) AND (d_year#2 = 2000)) AND isnotnull(d_date_sk Input [3]: [d_date_sk#1, d_year#2, d_day_name#3] Arguments: [d_date_sk#1, d_day_name#3], [d_date_sk#1, d_day_name#3] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, 
BuildRight Input [5]: [d_date_sk#1, d_day_name#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Arguments: [d_day_name#3, ss_store_sk#4, ss_sales_price#5], [d_day_name#3, ss_store_sk#4, ss_sales_price#5] -(9) Scan parquet spark_catalog.default.store +(9) CometScan parquet spark_catalog.default.store Output [4]: [s_store_sk#7, s_store_id#8, s_store_name#9, s_gmt_offset#10] Batched: true Location [not included in comparison]/{warehouse_dir}/store] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q44/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q44/explain.txt index 812f9f391..c9e28476b 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q44/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q44/explain.txt @@ -44,7 +44,7 @@ TakeOrderedAndProject (43) +- ReusedExchange (40) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_store_sk#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -188,7 +188,7 @@ Join condition: None Output [3]: [item_sk#10, rnk#14, item_sk#20] Input [4]: [item_sk#10, rnk#14, item_sk#20, rnk#22] -(34) Scan parquet spark_catalog.default.item +(34) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#23, i_product_name#24] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -245,7 +245,7 @@ Subquery:1 Hosting operator id = 8 Hosting Expression = Subquery scalar-subquery +- CometScan parquet spark_catalog.default.store_sales (44) -(44) Scan parquet spark_catalog.default.store_sales +(44) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_addr_sk#29, ss_store_sk#30, ss_net_profit#31, ss_sold_date_sk#32] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q45/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q45/explain.txt index f128499e3..962a51203 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q45/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q45/explain.txt @@ -37,7 +37,7 @@ TakeOrderedAndProject (36) +- CometScan parquet spark_catalog.default.item (25) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#2, ws_bill_customer_sk#3, ws_sales_price#4, ws_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -49,7 +49,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -82,7 +82,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#4], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] -(8) Scan 
parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -106,7 +106,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -261,7 +261,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt index 5197d99f8..15b16c1e8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt @@ -29,7 +29,7 @@ +- CometScan parquet spark_catalog.default.date_dim (19) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [7]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -41,7 +41,7 @@ ReadSchema: struct= 100.00) AND (ss_sales_price#5 <= 150.00)) OR ((ss_sales_price#5 >= 50.00) AND (ss_sales_price#5 <= 100.00))) OR ((ss_sales_price#5 >= 150.00) AND (ss_sales_price#5 <= 200.00)))) AND ((((ss_net_profit#6 >= 0.00) AND (ss_net_profit#6 <= 2000.00)) OR ((ss_net_profit#6 >= 150.00) AND (ss_net_profit#6 <= 3000.00))) OR ((ss_net_profit#6 >= 50.00) AND (ss_net_profit#6 <= 25000.00)))) -(3) Scan parquet spark_catalog.default.store +(3) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -65,7 +65,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#9], Inner, BuildRight Input [8]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, s_store_sk#9] Arguments: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7], [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.customer_demographics +(8) CometScan parquet spark_catalog.default.customer_demographics Output [3]: [cd_demo_sk#10, cd_marital_status#11, cd_education_status#12] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -89,7 +89,7 @@ Arguments: [ss_cdemo_sk#1], [cd_demo_sk#10], Inner, ((((((cd_marital_status#11 = Input [9]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, cd_demo_sk#10, cd_marital_status#11, 
cd_education_status#12] Arguments: [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7], [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.customer_address +(13) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -117,7 +117,7 @@ Arguments: [ss_addr_sk#2], [ca_address_sk#13], Inner, ((((ca_state#14 IN (CO,OH, Input [6]: [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7, ca_address_sk#13, ca_state#14] Arguments: [ss_quantity#4, ss_sold_date_sk#7], [ss_quantity#4, ss_sold_date_sk#7] -(19) Scan parquet spark_catalog.default.date_dim +(19) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -176,7 +176,7 @@ BroadcastExchange (33) +- CometScan parquet spark_catalog.default.date_dim (29) -(29) Scan parquet spark_catalog.default.date_dim +(29) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q49/explain.txt index 124a22ef9..205e71071 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q49/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q49/explain.txt @@ -78,7 +78,7 @@ TakeOrderedAndProject (77) +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] -(5) Scan parquet spark_catalog.default.web_returns +(5) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -122,7 +122,7 @@ Arguments: [ws_order_number#2, ws_item_sk#1], [wr_order_number#9, wr_item_sk#8], Input [9]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11] Arguments: [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11], [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -199,7 +199,7 @@ Condition : ((return_rank#35 <= 10) OR (currency_rank#36 <= 10)) Output [5]: [web AS channel#37, item#32, return_ratio#33, return_rank#35, currency_rank#36] Input [5]: [item#32, return_ratio#33, 
currency_ratio#34, return_rank#35, currency_rank#36] -(27) Scan parquet spark_catalog.default.catalog_sales +(27) CometScan parquet spark_catalog.default.catalog_sales Output [6]: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, cs_net_profit#42, cs_sold_date_sk#43] Batched: true Location: InMemoryFileIndex [] @@ -219,7 +219,7 @@ Arguments: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, c Input [5]: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, cs_sold_date_sk#43] Arguments: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, cs_sold_date_sk#43] -(31) Scan parquet spark_catalog.default.catalog_returns +(31) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#45, cr_order_number#46, cr_return_quantity#47, cr_return_amount#48, cr_returned_date_sk#49] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -304,7 +304,7 @@ Condition : ((return_rank#70 <= 10) OR (currency_rank#71 <= 10)) Output [5]: [catalog AS channel#72, item#67, return_ratio#68, return_rank#70, currency_rank#71] Input [5]: [item#67, return_ratio#68, currency_ratio#69, return_rank#70, currency_rank#71] -(50) Scan parquet spark_catalog.default.store_sales +(50) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, ss_net_profit#77, ss_sold_date_sk#78] Batched: true Location: InMemoryFileIndex [] @@ -324,7 +324,7 @@ Arguments: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, Input [5]: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, ss_sold_date_sk#78] Arguments: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, ss_sold_date_sk#78] -(54) Scan parquet spark_catalog.default.store_returns +(54) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#80, sr_ticket_number#81, sr_return_quantity#82, sr_return_amt#83, sr_returned_date_sk#84] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -443,7 +443,7 @@ BroadcastExchange (82) +- CometScan parquet spark_catalog.default.date_dim (78) -(78) Scan parquet spark_catalog.default.date_dim +(78) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt index e6d836892..a29ef4cec 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt @@ -71,7 +71,7 @@ TakeOrderedAndProject (70) +- CometScan parquet spark_catalog.default.web_site (56) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -87,7 +87,7 @@ Condition : isnotnull(ss_store_sk#1) Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS 
profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -107,7 +107,7 @@ Arguments: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, n Child 0 Input [6]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11] Child 1 Input [6]: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, net_loss#21] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -135,7 +135,7 @@ Arguments: [date_sk#7], [d_date_sk#22], Inner, BuildRight Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22] Arguments: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11], [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11] -(14) Scan parquet spark_catalog.default.store +(14) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#24, s_store_id#25] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -180,7 +180,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt# Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#8))#34, sum(UnscaledValue(return_amt#10))#35, sum(UnscaledValue(profit#9))#36, sum(UnscaledValue(net_loss#11))#37] Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#8))#34,17,2) AS sales#38, MakeDecimal(sum(UnscaledValue(return_amt#10))#35,17,2) AS returns#39, (MakeDecimal(sum(UnscaledValue(profit#9))#36,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#37,17,2)) AS profit#40, store channel AS channel#41, concat(store, s_store_id#25) AS id#42] -(23) Scan parquet spark_catalog.default.catalog_sales +(23) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_catalog_page_sk#43, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46] Batched: true Location: InMemoryFileIndex [] @@ -196,7 +196,7 @@ Condition : isnotnull(cs_catalog_page_sk#43) Input [4]: [cs_catalog_page_sk#43, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46] Arguments: [page_sk#48, date_sk#49, sales_price#50, profit#51, return_amt#52, net_loss#53], [cs_catalog_page_sk#43 AS page_sk#48, cs_sold_date_sk#46 AS date_sk#49, cs_ext_sales_price#44 AS sales_price#50, cs_net_profit#45 AS profit#51, 0.00 AS return_amt#52, 0.00 AS net_loss#53] -(26) Scan parquet spark_catalog.default.catalog_returns +(26) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_catalog_page_sk#54, cr_return_amount#55, cr_net_loss#56, cr_returned_date_sk#57] Batched: true Location: InMemoryFileIndex [] @@ -228,7 +228,7 @@ Arguments: [date_sk#49], [d_date_sk#64], Inner, BuildRight Input [7]: [page_sk#48, date_sk#49, sales_price#50, profit#51, return_amt#52, net_loss#53, d_date_sk#64] Arguments: [page_sk#48, sales_price#50, profit#51, return_amt#52, net_loss#53], [page_sk#48, sales_price#50, profit#51, return_amt#52, net_loss#53] -(33) Scan parquet spark_catalog.default.catalog_page +(33) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#65, cp_catalog_page_id#66] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -273,7 +273,7 @@ Functions [4]: 
[sum(UnscaledValue(sales_price#50)), sum(UnscaledValue(return_amt Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#50))#75, sum(UnscaledValue(return_amt#52))#76, sum(UnscaledValue(profit#51))#77, sum(UnscaledValue(net_loss#53))#78] Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#50))#75,17,2) AS sales#79, MakeDecimal(sum(UnscaledValue(return_amt#52))#76,17,2) AS returns#80, (MakeDecimal(sum(UnscaledValue(profit#51))#77,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#53))#78,17,2)) AS profit#81, catalog channel AS channel#82, concat(catalog_page, cp_catalog_page_id#66) AS id#83] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_web_site_sk#84, ws_ext_sales_price#85, ws_net_profit#86, ws_sold_date_sk#87] Batched: true Location: InMemoryFileIndex [] @@ -289,7 +289,7 @@ Condition : isnotnull(ws_web_site_sk#84) Input [4]: [ws_web_site_sk#84, ws_ext_sales_price#85, ws_net_profit#86, ws_sold_date_sk#87] Arguments: [wsr_web_site_sk#89, date_sk#90, sales_price#91, profit#92, return_amt#93, net_loss#94], [ws_web_site_sk#84 AS wsr_web_site_sk#89, ws_sold_date_sk#87 AS date_sk#90, ws_ext_sales_price#85 AS sales_price#91, ws_net_profit#86 AS profit#92, 0.00 AS return_amt#93, 0.00 AS net_loss#94] -(45) Scan parquet spark_catalog.default.web_returns +(45) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#95, wr_order_number#96, wr_return_amt#97, wr_net_loss#98, wr_returned_date_sk#99] Batched: true Location: InMemoryFileIndex [] @@ -300,7 +300,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -126,7 +126,7 @@ Arguments: hashpartitioning(item_sk#11, d_date#6, 5), ENSURE_REQUIREMENTS, [plan Input [3]: [item_sk#11, d_date#6, cume_sales#13] Arguments: [item_sk#11 ASC NULLS FIRST, d_date#6 ASC NULLS FIRST], false, 0 -(19) Scan parquet spark_catalog.default.store_sales +(19) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#14, ss_sales_price#15, ss_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -235,7 +235,7 @@ BroadcastExchange (45) +- CometScan parquet spark_catalog.default.date_dim (41) -(41) Scan parquet spark_catalog.default.date_dim +(41) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt index 6d4feea57..9e017b927 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt @@ -20,7 +20,7 @@ TakeOrderedAndProject (19) +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) 
Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1, d_year#2], [d_date_sk#1, d_year#2] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_year#2, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [d_year#2, ss_item_sk#4, ss_ext_sales_price#5], [d_year#2, ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt index e85d73a07..631d21aa5 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt @@ -30,7 +30,7 @@ TakeOrderedAndProject (29) +- CometScan parquet spark_catalog.default.store (15) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4, i_manufact_id#5] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -45,7 +45,7 @@ Condition : ((((i_category#4 IN (Books Input [5]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_manufact_id#5], [i_item_sk#1, i_manufact_id#5] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#10, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -70,7 +70,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#10], Inner, BuildRight Input [6]: [i_item_sk#1, i_manufact_id#5, ss_item_sk#10, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] Arguments: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13], [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] -(9) Scan parquet spark_catalog.default.date_dim +(9) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_month_seq#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -98,7 +98,7 @@ Arguments: [ss_sold_date_sk#13], [d_date_sk#15], Inner, BuildRight Input [6]: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13, d_date_sk#15, d_qoy#17] Arguments: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, d_qoy#17], [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, d_qoy#17] -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#18] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -177,7 +177,7 @@ BroadcastExchange (34) +- CometScan parquet spark_catalog.default.date_dim (30) -(30) Scan parquet spark_catalog.default.date_dim +(30) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_month_seq#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q54/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q54/explain.txt index a0ce63a83..a1fce65e8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q54/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q54/explain.txt @@ -57,7 +57,7 @@ TakeOrderedAndProject (56) +- CometScan parquet spark_catalog.default.date_dim (43) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -73,7 +73,7 @@ Condition : (isnotnull(cs_item_sk#2) AND isnotnull(cs_bill_customer_sk#1)) Input [3]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_sold_date_sk#3] Arguments: [sold_date_sk#5, customer_sk#6, item_sk#7], [cs_sold_date_sk#3 AS sold_date_sk#5, cs_bill_customer_sk#1 AS customer_sk#6, cs_item_sk#2 AS item_sk#7] -(4) Scan parquet spark_catalog.default.web_sales +(4) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#8, ws_bill_customer_sk#9, ws_sold_date_sk#10] Batched: true Location: InMemoryFileIndex [] @@ -93,7 +93,7 @@ Arguments: [sold_date_sk#11, customer_sk#12, item_sk#13], [ws_sold_date_sk#10 AS Child 0 Input [3]: [sold_date_sk#5, customer_sk#6, item_sk#7] Child 1 Input [3]: [sold_date_sk#11, customer_sk#12, item_sk#13] -(8) Scan parquet spark_catalog.default.item +(8) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#14, i_class#15, i_category#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -121,7 +121,7 @@ Arguments: [item_sk#7], [i_item_sk#14], Inner, BuildRight Input [4]: [sold_date_sk#5, customer_sk#6, item_sk#7, i_item_sk#14] Arguments: [sold_date_sk#5, customer_sk#6], [sold_date_sk#5, customer_sk#6] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#17, d_year#18, d_moy#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -149,7 +149,7 @@ Arguments: [sold_date_sk#5], [d_date_sk#17], Inner, BuildRight Input [3]: [sold_date_sk#5, customer_sk#6, d_date_sk#17] Arguments: [customer_sk#6], [customer_sk#6] -(20) Scan parquet spark_catalog.default.customer +(20) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#20, c_current_addr_sk#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -187,7 +187,7 @@ Input [2]: [c_customer_sk#20, c_current_addr_sk#21] Keys [2]: [c_customer_sk#20, c_current_addr_sk#21] Functions: [] -(28) Scan parquet spark_catalog.default.store_sales +(28) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_customer_sk#22, ss_ext_sales_price#23, ss_sold_date_sk#24] Batched: true Location: InMemoryFileIndex [] @@ -212,7 +212,7 @@ Arguments: [c_customer_sk#20], [ss_customer_sk#22], Inner, BuildRight Input [5]: [c_customer_sk#20, c_current_addr_sk#21, ss_customer_sk#22, ss_ext_sales_price#23, ss_sold_date_sk#24] Arguments: [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24], [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24] -(33) Scan parquet spark_catalog.default.customer_address +(33) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#26, ca_county#27, ca_state#28] Batched: true 
Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -236,7 +236,7 @@ Arguments: [c_current_addr_sk#21], [ca_address_sk#26], Inner, BuildRight Input [7]: [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_address_sk#26, ca_county#27, ca_state#28] Arguments: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28], [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28] -(38) Scan parquet spark_catalog.default.store +(38) CometScan parquet spark_catalog.default.store Output [2]: [s_county#29, s_state#30] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -260,7 +260,7 @@ Arguments: [ca_county#27, ca_state#28], [s_county#29, s_state#30], Inner, BuildR Input [7]: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28, s_county#29, s_state#30] Arguments: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24], [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24] -(43) Scan parquet spark_catalog.default.date_dim +(43) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#31, d_month_seq#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -341,7 +341,7 @@ BroadcastExchange (61) +- CometScan parquet spark_catalog.default.date_dim (57) -(57) Scan parquet spark_catalog.default.date_dim +(57) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#17, d_year#18, d_moy#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -373,7 +373,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#31, d_month_seq#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -409,7 +409,7 @@ Subquery:6 Hosting operator id = 62 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (67) -(67) Scan parquet spark_catalog.default.date_dim +(67) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#46, d_year#47, d_moy#48] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -451,7 +451,7 @@ Subquery:7 Hosting operator id = 62 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (74) -(74) Scan parquet spark_catalog.default.date_dim +(74) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#50, d_year#51, d_moy#52] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt index dcd32a96a..fc018e3d1 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt @@ -20,7 +20,7 @@ TakeOrderedAndProject (19) +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND 
isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1], [d_date_sk#1] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [4]: [d_date_sk#1, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [ss_item_sk#4, ss_ext_sales_price#5], [ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt index bbfd9dcaf..47fc0065e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt @@ -64,7 +64,7 @@ TakeOrderedAndProject (63) +- ReusedExchange (52) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -76,7 +76,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_item_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_item_id#13, i_color#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -201,7 +201,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#3))#17] Results [2]: [i_item_id#12, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#3))#17,17,2) AS total_sales#18] -(29) Scan parquet spark_catalog.default.catalog_sales +(29) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#19, cs_item_sk#20, cs_ext_sales_price#21, cs_sold_date_sk#22] Batched: true Location: InMemoryFileIndex [] @@ -270,7 +270,7 @@ Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#21))] Aggregate Attributes [1]: [sum(UnscaledValue(cs_ext_sales_price#21))#30] Results [2]: [i_item_id#27, MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#21))#30,17,2) AS total_sales#31] -(44) Scan parquet spark_catalog.default.web_sales +(44) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#32, ws_bill_addr_sk#33, ws_ext_sales_price#34, ws_sold_date_sk#35] Batched: true Location: InMemoryFileIndex [] @@ -373,7 +373,7 @@ BroadcastExchange (68) +- CometScan parquet spark_catalog.default.date_dim (64) -(64) Scan parquet spark_catalog.default.date_dim +(64) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt index 76b1adf1a..8423e825d 100644 --- 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt @@ -46,7 +46,7 @@ TakeOrderedAndProject (45) +- ReusedExchange (38) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#1, i_brand#2, i_category#3] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -57,7 +57,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -82,7 +82,7 @@ Arguments: [i_item_sk#1], [cs_item_sk#5], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -106,7 +106,7 @@ Arguments: [cs_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.call_center +(13) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#12, cc_name#13] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] @@ -261,7 +261,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt index 2c82f91e9..809315c9e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt @@ -54,7 +54,7 @@ TakeOrderedAndProject (53) +- ReusedExchange (42) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -66,7 +66,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_item_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -90,7 +90,7 
@@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [5]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -151,7 +151,7 @@ Results [2]: [i_item_id#6 AS item_id#13, MakeDecimal(sum(UnscaledValue(ss_ext_sa Input [2]: [item_id#13, ss_item_rev#14] Condition : isnotnull(ss_item_rev#14) -(21) Scan parquet spark_catalog.default.catalog_sales +(21) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#15, cs_ext_sales_price#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -226,7 +226,7 @@ Join condition: ((((cast(ss_item_rev#14 as decimal(19,3)) >= (0.9 * cs_item_rev# Output [3]: [item_id#13, ss_item_rev#14, cs_item_rev#26] Input [4]: [item_id#13, ss_item_rev#14, item_id#25, cs_item_rev#26] -(37) Scan parquet spark_catalog.default.web_sales +(37) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#27, ws_ext_sales_price#28, ws_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -320,7 +320,7 @@ BroadcastExchange (63) +- CometScan parquet spark_catalog.default.date_dim (56) -(54) Scan parquet spark_catalog.default.date_dim +(54) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -331,7 +331,7 @@ ReadSchema: struct Input [2]: [d_date_sk#7, d_date#8] Condition : isnotnull(d_date_sk#7) -(56) Scan parquet spark_catalog.default.date_dim +(56) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#9, d_week_seq#43] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -375,7 +375,7 @@ Subquery:3 Hosting operator id = 56 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (64) -(64) Scan parquet spark_catalog.default.date_dim +(64) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#46, d_week_seq#47] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt index afa9351c3..b471c3435 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt @@ -44,7 +44,7 @@ TakeOrderedAndProject (43) +- CometScan parquet spark_catalog.default.date_dim (33) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -56,7 +56,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_week_seq#5, d_day_name#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -101,7 +101,7 @@ Functions [7]: 
[sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_s Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END))#21, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END))#22, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END))#23, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END))#24, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END))#25, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))#27] Results [9]: [d_week_seq#5, ss_store_sk#1, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END))#21,17,2) AS sun_sales#28, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END))#22,17,2) AS mon_sales#29, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END))#23,17,2) AS tue_sales#30, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END))#24,17,2) AS wed_sales#31, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END))#25,17,2) AS thu_sales#32, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END))#26,17,2) AS fri_sales#33, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))#27,17,2) AS sat_sales#34] -(12) Scan parquet spark_catalog.default.store +(12) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#35, s_store_id#36, s_store_name#37] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -129,7 +129,7 @@ Join condition: None Output [10]: [d_week_seq#5, sun_sales#28, mon_sales#29, tue_sales#30, wed_sales#31, thu_sales#32, fri_sales#33, sat_sales#34, s_store_id#36, s_store_name#37] Input [12]: [d_week_seq#5, ss_store_sk#1, sun_sales#28, mon_sales#29, tue_sales#30, wed_sales#31, thu_sales#32, fri_sales#33, sat_sales#34, s_store_sk#35, s_store_id#36, s_store_name#37] -(18) Scan parquet spark_catalog.default.date_dim +(18) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_month_seq#38, d_week_seq#39] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -171,7 +171,7 @@ Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#59 = Sunday ) THEN ss_ Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#59 = Sunday ) THEN ss_sales_price#60 END))#21, sum(UnscaledValue(CASE WHEN (d_day_name#59 = Monday ) THEN ss_sales_price#60 END))#22, sum(UnscaledValue(CASE WHEN (d_day_name#59 = Tuesday ) THEN ss_sales_price#60 END))#23, sum(UnscaledValue(CASE WHEN (d_day_name#59 = Wednesday) THEN ss_sales_price#60 END))#24, sum(UnscaledValue(CASE WHEN (d_day_name#59 = Thursday ) THEN ss_sales_price#60 END))#25, sum(UnscaledValue(CASE WHEN (d_day_name#59 = Friday ) THEN ss_sales_price#60 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#59 = Saturday ) THEN ss_sales_price#60 END))#27] Results [9]: [d_week_seq#50, ss_store_sk#51, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#59 = Sunday ) THEN ss_sales_price#60 END))#21,17,2) AS sun_sales#61, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#59 = Monday ) THEN ss_sales_price#60 END))#22,17,2) AS mon_sales#62, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#59 = Tuesday ) THEN ss_sales_price#60 
END))#23,17,2) AS tue_sales#63, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#59 = Wednesday) THEN ss_sales_price#60 END))#24,17,2) AS wed_sales#64, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#59 = Thursday ) THEN ss_sales_price#60 END))#25,17,2) AS thu_sales#65, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#59 = Friday ) THEN ss_sales_price#60 END))#26,17,2) AS fri_sales#66, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#59 = Saturday ) THEN ss_sales_price#60 END))#27,17,2) AS sat_sales#67] -(27) Scan parquet spark_catalog.default.store +(27) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#68, s_store_id#69] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -199,7 +199,7 @@ Join condition: None Output [9]: [d_week_seq#50, sun_sales#61, mon_sales#62, tue_sales#63, wed_sales#64, thu_sales#65, fri_sales#66, sat_sales#67, s_store_id#69] Input [11]: [d_week_seq#50, ss_store_sk#51, sun_sales#61, mon_sales#62, tue_sales#63, wed_sales#64, thu_sales#65, fri_sales#66, sat_sales#67, s_store_sk#68, s_store_id#69] -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_month_seq#70, d_week_seq#71] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt index 7c380571e..80ba7d7f4 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt @@ -41,7 +41,7 @@ TakeOrderedAndProject (40) +- CometScan parquet spark_catalog.default.item (23) -(1) Scan parquet spark_catalog.default.customer_address +(1) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#1, ca_state#2] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -52,7 +52,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#1, ca_state#2] Condition : isnotnull(ca_address_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#3, c_current_addr_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -76,7 +76,7 @@ Arguments: [ca_address_sk#1], [c_current_addr_sk#4], Inner, BuildRight Input [4]: [ca_address_sk#1, ca_state#2, c_customer_sk#3, c_current_addr_sk#4] Arguments: [ca_state#2, c_customer_sk#3], [ca_state#2, c_customer_sk#3] -(8) Scan parquet spark_catalog.default.store_sales +(8) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -101,7 +101,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], Inner, BuildRight Input [5]: [ca_state#2, c_customer_sk#3, ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Arguments: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7], [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -132,7 +132,7 @@ Arguments: [ca_state#2, ss_item_sk#5], [ca_state#2, ss_item_sk#5] (19) ColumnarToRow [codegen id : 4] Input [2]: [ca_state#2, 
ss_item_sk#5] -(20) Scan parquet spark_catalog.default.item +(20) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#13, i_current_price#14, i_category#15] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -146,7 +146,7 @@ Condition : ((isnotnull(i_current_price#14) AND isnotnull(i_category#15)) AND is (22) ColumnarToRow [codegen id : 3] Input [3]: [i_item_sk#13, i_current_price#14, i_category#15] -(23) Scan parquet spark_catalog.default.item +(23) CometScan parquet spark_catalog.default.item Output [2]: [i_current_price#16, i_category#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -246,7 +246,7 @@ BroadcastExchange (45) +- CometScan parquet spark_catalog.default.date_dim (41) -(41) Scan parquet spark_catalog.default.date_dim +(41) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -280,7 +280,7 @@ Subquery:3 Hosting operator id = 41 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#29, d_year#30, d_moy#31] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt index 342a3deb8..09e20ef23 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt @@ -64,7 +64,7 @@ TakeOrderedAndProject (63) +- ReusedExchange (52) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -76,7 +76,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_item_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_item_id#13, i_category#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -201,7 +201,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#3))#17] Results [2]: [i_item_id#12, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#3))#17,17,2) AS total_sales#18] -(29) Scan parquet spark_catalog.default.catalog_sales +(29) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#19, cs_item_sk#20, cs_ext_sales_price#21, cs_sold_date_sk#22] Batched: true Location: InMemoryFileIndex [] @@ -270,7 +270,7 @@ Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#21))] Aggregate Attributes [1]: [sum(UnscaledValue(cs_ext_sales_price#21))#30] Results [2]: [i_item_id#27, MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#21))#30,17,2) AS total_sales#31] -(44) Scan parquet spark_catalog.default.web_sales +(44) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#32, ws_bill_addr_sk#33, ws_ext_sales_price#34, ws_sold_date_sk#35] Batched: true Location: InMemoryFileIndex [] @@ -373,7 +373,7 @@ BroadcastExchange (68) +- CometScan parquet spark_catalog.default.date_dim (64) 
-(64) Scan parquet spark_catalog.default.date_dim +(64) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt index 6d0d03e13..098b72016 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt @@ -66,7 +66,7 @@ +- ReusedExchange (56) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_promo_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -78,7 +78,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Condition : (isnotnull(ss_store_sk#4) AND isnotnull(ss_item_sk#3)) -(6) Scan parquet spark_catalog.default.date_dim +(6) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#8, d_month_seq#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -136,7 +136,7 @@ Join condition: None Output [4]: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#13] Input [5]: [s_store_sk#1, s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#13] -(20) Scan parquet spark_catalog.default.item +(20) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, i_brand#18] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -164,7 +164,7 @@ Join condition: None Output [7]: [s_store_name#2, ss_store_sk#4, revenue#13, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, i_brand#18] Input [9]: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#13, i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, i_brand#18] -(26) Scan parquet spark_catalog.default.store_sales +(26) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#19, ss_store_sk#20, ss_sales_price#21, ss_sold_date_sk#22] Batched: true Location: InMemoryFileIndex [] @@ -259,7 +259,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(43) Scan parquet spark_catalog.default.date_dim +(43) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#8, d_month_seq#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt index ea11cbb74..05f7b272b 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt @@ -52,7 +52,7 @@ TakeOrderedAndProject (51) +- ReusedExchange (40) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_warehouse_sk#3, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, ws_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -64,7 +64,7 @@ ReadSchema: struct -(4) Scan parquet 
spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#10] Join type: LeftAnti Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#14, cs_sold_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -171,7 +171,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#2, c_current_addr_sk#3] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(28) Scan parquet spark_catalog.default.customer_address +(28) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#18, ca_state#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [1]: [c_current_cdemo_sk#2] Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#18] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_education_status#23, cd_purchase_estimate#24, cd_credit_rating#25] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt index 5755ed1dc..4dbf5f775 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt @@ -31,7 +31,7 @@ TakeOrderedAndProject (30) +- CometScan parquet spark_catalog.default.promotion (20) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -43,7 +43,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -89,7 +89,7 @@ Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] (9) ColumnarToRow [codegen id : 5] Input [2]: [ss_store_sk#1, ss_net_profit#2] -(10) Scan 
parquet spark_catalog.default.store +(10) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#7, s_county#8, s_state#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -103,7 +103,7 @@ Condition : isnotnull(s_store_sk#7) (12) ColumnarToRow [codegen id : 4] Input [3]: [s_store_sk#7, s_county#8, s_state#9] -(13) Scan parquet spark_catalog.default.store_sales +(13) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -115,7 +115,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Condition : isnotnull(ss_store_sk#10) -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#14, s_state#15] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -268,7 +268,7 @@ BroadcastExchange (50) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt index c4b15d4e5..a36aa8162 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt @@ -39,7 +39,7 @@ +- CometScan parquet spark_catalog.default.time_dim (26) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#1, i_brand_id#2, i_brand#3, i_manager_id#4] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -58,7 +58,7 @@ Arguments: [i_item_sk#1, i_brand_id#2, i_brand#3], [i_item_sk#1, i_brand_id#2, i Input [3]: [i_item_sk#1, i_brand_id#2, i_brand#3] Arguments: [i_item_sk#1, i_brand_id#2, i_brand#3] -(5) Scan parquet spark_catalog.default.web_sales +(5) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_sold_time_sk#5, ws_item_sk#6, ws_ext_sales_price#7, ws_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -70,7 +70,7 @@ ReadSchema: struct date_add(d_d Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#28, d_date#29] Arguments: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] -(46) Scan parquet spark_catalog.default.promotion +(46) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#30] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -314,7 +314,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Arguments: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST] -(53) Scan parquet spark_catalog.default.catalog_returns +(53) CometScan parquet 
spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#31, cr_order_number#32, cr_returned_date_sk#33] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -377,7 +377,7 @@ BroadcastExchange (69) +- CometScan parquet spark_catalog.default.date_dim (65) -(65) Scan parquet spark_catalog.default.date_dim +(65) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt index 7530f5aa5..e4d1ff5a2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -45,7 +45,7 @@ ReadSchema: struct= 1) AND (cnt#17 <= 5)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt index d5eb481f1..70e62b679 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt @@ -72,7 +72,7 @@ TakeOrderedAndProject (71) +- ReusedExchange (61) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -83,7 +83,7 @@ ReadSchema: struct 0.00)) -(18) Scan parquet spark_catalog.default.customer +(18) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#16, c_customer_id#17, c_first_name#18, c_last_name#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -168,7 +168,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -83,7 +83,7 @@ Arguments: [ss_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3, 
d_date_sk#5] Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] -(9) Scan parquet spark_catalog.default.store +(9) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#8, s_store_name#9, s_zip#10] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -107,7 +107,7 @@ Arguments: [ss_store_sk#1], [s_store_sk#8], Inner, BuildRight Input [5]: [ss_store_sk#1, ss_net_profit#2, s_store_sk#8, s_store_name#9, s_zip#10] Arguments: [ss_net_profit#2, s_store_name#9, s_zip#10], [ss_net_profit#2, s_store_name#9, s_zip#10] -(14) Scan parquet spark_catalog.default.customer_address +(14) CometScan parquet spark_catalog.default.customer_address Output [1]: [ca_zip#11] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -121,7 +121,7 @@ Condition : (substr(ca_zip#11, 1, 5) INSET 10144, 10336, 10390, 10445, 10516, 10 Input [1]: [ca_zip#11] Arguments: [ca_zip#12], [substr(ca_zip#11, 1, 5) AS ca_zip#12] -(17) Scan parquet spark_catalog.default.customer_address +(17) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#13, ca_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -132,7 +132,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#13, ca_zip#14] Condition : isnotnull(ca_address_sk#13) -(19) Scan parquet spark_catalog.default.customer +(19) CometScan parquet spark_catalog.default.customer Output [2]: [c_current_addr_sk#15, c_preferred_cust_flag#16] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -253,7 +253,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(43) Scan parquet spark_catalog.default.date_dim +(43) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt index 518dc7d83..63109f7d7 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt @@ -103,7 +103,7 @@ TakeOrderedAndProject (102) +- ReusedExchange (90) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -123,7 +123,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#4, 5), ENSURE_REQUIRE Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7], [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#4 ASC NULLS FIRST] -(5) Scan parquet spark_catalog.default.store_returns +(5) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12, sr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -155,7 +155,7 @@ Arguments: [ss_item_sk#1, ss_ticket_number#4], [sr_item_sk#9, 
sr_ticket_number#1 Input [11]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12], [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12] -(12) Scan parquet spark_catalog.default.date_dim +(12) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -183,7 +183,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#14], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12, d_date_sk#14] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12], [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12] -(18) Scan parquet spark_catalog.default.store +(18) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#16, s_store_id#17] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -207,7 +207,7 @@ Arguments: [ss_store_sk#2], [s_store_sk#16], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_sk#16, s_store_id#17] Arguments: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17], [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17] -(23) Scan parquet spark_catalog.default.item +(23) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#18, i_current_price#19] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -235,7 +235,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#18], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17, i_item_sk#18] Arguments: [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17], [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17] -(29) Scan parquet spark_catalog.default.promotion +(29) CometScan parquet spark_catalog.default.promotion Output [2]: [p_promo_sk#20, p_channel_tv#21] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -284,7 +284,7 @@ Functions [3]: [sum(UnscaledValue(ss_ext_sales_price#5)), sum(coalesce(cast(sr_r Aggregate Attributes [3]: [sum(UnscaledValue(ss_ext_sales_price#5))#32, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#33, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#34] Results [5]: [MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#5))#32,17,2) AS sales#35, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#33 AS returns#36, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#34 AS profit#37, store channel AS channel#38, concat(store, s_store_id#17) AS id#39] -(39) Scan parquet spark_catalog.default.catalog_sales +(39) CometScan 
parquet spark_catalog.default.catalog_sales Output [7]: [cs_catalog_page_sk#40, cs_item_sk#41, cs_promo_sk#42, cs_order_number#43, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46] Batched: true Location: InMemoryFileIndex [] @@ -304,7 +304,7 @@ Arguments: hashpartitioning(cs_item_sk#41, cs_order_number#43, 5), ENSURE_REQUIR Input [7]: [cs_catalog_page_sk#40, cs_item_sk#41, cs_promo_sk#42, cs_order_number#43, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46] Arguments: [cs_catalog_page_sk#40, cs_item_sk#41, cs_promo_sk#42, cs_order_number#43, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46], [cs_item_sk#41 ASC NULLS FIRST, cs_order_number#43 ASC NULLS FIRST] -(43) Scan parquet spark_catalog.default.catalog_returns +(43) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#48, cr_order_number#49, cr_return_amount#50, cr_net_loss#51, cr_returned_date_sk#52] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -348,7 +348,7 @@ Arguments: [cs_sold_date_sk#46], [d_date_sk#53], Inner, BuildRight Input [9]: [cs_catalog_page_sk#40, cs_item_sk#41, cs_promo_sk#42, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46, cr_return_amount#50, cr_net_loss#51, d_date_sk#53] Arguments: [cs_catalog_page_sk#40, cs_item_sk#41, cs_promo_sk#42, cs_ext_sales_price#44, cs_net_profit#45, cr_return_amount#50, cr_net_loss#51], [cs_catalog_page_sk#40, cs_item_sk#41, cs_promo_sk#42, cs_ext_sales_price#44, cs_net_profit#45, cr_return_amount#50, cr_net_loss#51] -(53) Scan parquet spark_catalog.default.catalog_page +(53) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#54, cp_catalog_page_id#55] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -417,7 +417,7 @@ Functions [3]: [sum(UnscaledValue(cs_ext_sales_price#44)), sum(coalesce(cast(cr_ Aggregate Attributes [3]: [sum(UnscaledValue(cs_ext_sales_price#44))#68, sum(coalesce(cast(cr_return_amount#50 as decimal(12,2)), 0.00))#69, sum((cs_net_profit#45 - coalesce(cast(cr_net_loss#51 as decimal(12,2)), 0.00)))#70] Results [5]: [MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#44))#68,17,2) AS sales#71, sum(coalesce(cast(cr_return_amount#50 as decimal(12,2)), 0.00))#69 AS returns#72, sum((cs_net_profit#45 - coalesce(cast(cr_net_loss#51 as decimal(12,2)), 0.00)))#70 AS profit#73, catalog channel AS channel#74, concat(catalog_page, cp_catalog_page_id#55) AS id#75] -(68) Scan parquet spark_catalog.default.web_sales +(68) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_item_sk#76, ws_web_site_sk#77, ws_promo_sk#78, ws_order_number#79, ws_ext_sales_price#80, ws_net_profit#81, ws_sold_date_sk#82] Batched: true Location: InMemoryFileIndex [] @@ -437,7 +437,7 @@ Arguments: hashpartitioning(ws_item_sk#76, ws_order_number#79, 5), ENSURE_REQUIR Input [7]: [ws_item_sk#76, ws_web_site_sk#77, ws_promo_sk#78, ws_order_number#79, ws_ext_sales_price#80, ws_net_profit#81, ws_sold_date_sk#82] Arguments: [ws_item_sk#76, ws_web_site_sk#77, ws_promo_sk#78, ws_order_number#79, ws_ext_sales_price#80, ws_net_profit#81, ws_sold_date_sk#82], [ws_item_sk#76 ASC NULLS FIRST, ws_order_number#79 ASC NULLS FIRST] -(72) Scan parquet spark_catalog.default.web_returns +(72) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#84, wr_order_number#85, wr_return_amt#86, wr_net_loss#87, wr_returned_date_sk#88] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -481,7 
+481,7 @@ Arguments: [ws_sold_date_sk#82], [d_date_sk#89], Inner, BuildRight Input [9]: [ws_item_sk#76, ws_web_site_sk#77, ws_promo_sk#78, ws_ext_sales_price#80, ws_net_profit#81, ws_sold_date_sk#82, wr_return_amt#86, wr_net_loss#87, d_date_sk#89] Arguments: [ws_item_sk#76, ws_web_site_sk#77, ws_promo_sk#78, ws_ext_sales_price#80, ws_net_profit#81, wr_return_amt#86, wr_net_loss#87], [ws_item_sk#76, ws_web_site_sk#77, ws_promo_sk#78, ws_ext_sales_price#80, ws_net_profit#81, wr_return_amt#86, wr_net_loss#87] -(82) Scan parquet spark_catalog.default.web_site +(82) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#90, web_site_id#91] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] @@ -588,7 +588,7 @@ BroadcastExchange (107) +- CometScan parquet spark_catalog.default.date_dim (103) -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt index b266f4a24..6b252f661 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt @@ -51,7 +51,7 @@ TakeOrderedAndProject (50) +- CometScan parquet spark_catalog.default.customer_address (44) -(1) Scan parquet spark_catalog.default.catalog_returns +(1) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_returning_customer_sk#1, cr_returning_addr_sk#2, cr_return_amt_inc_tax#3, cr_returned_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -63,7 +63,7 @@ ReadSchema: struct (avg(ctr_total_ret Output [2]: [ctr_customer_sk#13, ctr_total_return#15] Input [5]: [ctr_customer_sk#13, ctr_state#14, ctr_total_return#15, (avg(ctr_total_return) * 1.2)#33, ctr_state#26] -(38) Scan parquet spark_catalog.default.customer +(38) CometScan parquet spark_catalog.default.customer Output [6]: [c_customer_sk#34, c_customer_id#35, c_current_addr_sk#36, c_salutation#37, c_first_name#38, c_last_name#39] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -261,7 +261,7 @@ Join condition: None Output [6]: [ctr_total_return#15, c_customer_id#35, c_current_addr_sk#36, c_salutation#37, c_first_name#38, c_last_name#39] Input [8]: [ctr_customer_sk#13, ctr_total_return#15, c_customer_sk#34, c_customer_id#35, c_current_addr_sk#36, c_salutation#37, c_first_name#38, c_last_name#39] -(44) Scan parquet spark_catalog.default.customer_address +(44) CometScan parquet spark_catalog.default.customer_address Output [12]: [ca_address_sk#40, ca_street_number#41, ca_street_name#42, ca_street_type#43, ca_suite_number#44, ca_city#45, ca_county#46, ca_state#47, ca_zip#48, ca_country#49, ca_gmt_offset#50, ca_location_type#51] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -303,7 +303,7 @@ BroadcastExchange (55) +- CometScan parquet spark_catalog.default.date_dim (51) -(51) Scan parquet spark_catalog.default.date_dim +(51) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#6, d_year#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt index 774b6b112..34319a8fc 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt @@ -27,7 +27,7 @@ +- CometScan parquet spark_catalog.default.store_sales (17) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -42,7 +42,7 @@ Condition : ((((isnotnull(i_current_price#4) AND (i_current_price#4 >= 62.00)) A Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(4) Scan parquet spark_catalog.default.inventory +(4) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -71,7 +71,7 @@ Arguments: [i_item_sk#1], [inv_item_sk#6], Inner, BuildRight Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item_sk#6, inv_date_sk#8] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(17) Scan parquet spark_catalog.default.store_sales +(17) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#12, ss_sold_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -158,7 +158,7 @@ BroadcastExchange (31) +- CometScan parquet spark_catalog.default.date_dim (27) -(27) Scan parquet spark_catalog.default.date_dim +(27) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt index d9d319813..c073c4cca 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt @@ -51,7 +51,7 @@ TakeOrderedAndProject (50) +- ReusedExchange (40) -(1) Scan parquet spark_catalog.default.store_returns +(1) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -63,7 +63,7 @@ ReadSchema: struct Input [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] Condition : isnotnull(sr_item_sk#1) -(3) 
Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_item_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -87,7 +87,7 @@ Arguments: [sr_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [5]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3, i_item_sk#5, i_item_id#6] Arguments: [sr_return_quantity#2, sr_returned_date_sk#3, i_item_id#6], [sr_return_quantity#2, sr_returned_date_sk#3, i_item_id#6] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -144,7 +144,7 @@ Functions [1]: [sum(sr_return_quantity#2)] Aggregate Attributes [1]: [sum(sr_return_quantity#2)#12] Results [2]: [i_item_id#6 AS item_id#13, sum(sr_return_quantity#2)#12 AS sr_item_qty#14] -(20) Scan parquet spark_catalog.default.catalog_returns +(20) CometScan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#15, cr_return_quantity#16, cr_returned_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -215,7 +215,7 @@ Join condition: None Output [3]: [item_id#13, sr_item_qty#14, cr_item_qty#26] Input [4]: [item_id#13, sr_item_qty#14, item_id#25, cr_item_qty#26] -(35) Scan parquet spark_catalog.default.web_returns +(35) CometScan parquet spark_catalog.default.web_returns Output [3]: [wr_item_sk#27, wr_return_quantity#28, wr_returned_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -309,7 +309,7 @@ BroadcastExchange (64) +- CometScan parquet spark_catalog.default.date_dim (54) -(51) Scan parquet spark_catalog.default.date_dim +(51) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -320,13 +320,13 @@ ReadSchema: struct Input [2]: [d_date_sk#7, d_date#8] Condition : isnotnull(d_date_sk#7) -(53) Scan parquet spark_catalog.default.date_dim +(53) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#9, d_week_seq#43] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] ReadSchema: struct -(54) Scan parquet spark_catalog.default.date_dim +(54) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#44, d_week_seq#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt index 468af40d2..572fd7a66 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt @@ -33,7 +33,7 @@ TakeOrderedAndProject (32) +- CometScan parquet spark_catalog.default.store_returns (26) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [6]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#5, c_last_name#6] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -44,7 +44,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: 
[d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -64,7 +64,7 @@ Arguments: [ws_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5] Arguments: [ws_item_sk#1, ws_net_paid#2], [ws_item_sk#1, ws_net_paid#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#7, i_class#8, i_category#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -143,7 +143,7 @@ BroadcastExchange (28) +- CometScan parquet spark_catalog.default.date_dim (24) -(24) Scan parquet spark_catalog.default.date_dim +(24) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt index d023b5b31..28b942e2f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt @@ -50,7 +50,7 @@ +- ReusedExchange (37) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Batched: true Location: InMemoryFileIndex [] @@ -62,7 +62,7 @@ ReadSchema: struct Input [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Condition : isnotnull(ss_customer_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -90,7 +90,7 @@ Arguments: [ss_sold_date_sk#2], [d_date_sk#4], Inner, BuildRight Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5] Arguments: [ss_customer_sk#1, d_date#5], [ss_customer_sk#1, d_date#5] -(9) Scan parquet spark_catalog.default.customer +(9) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -131,7 +131,7 @@ Functions: [] (17) ColumnarToRow [codegen id : 3] Input [3]: [c_last_name#9, c_first_name#8, d_date#5] -(18) Scan parquet spark_catalog.default.catalog_sales +(18) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_bill_customer_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -194,7 +194,7 @@ Right keys [6]: [coalesce(c_last_name#17, ), isnull(c_last_name#17), coalesce(c_ Join type: LeftAnti Join condition: None -(32) Scan parquet spark_catalog.default.web_sales +(32) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#18, ws_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -289,7 +289,7 @@ BroadcastExchange (54) +- CometScan parquet spark_catalog.default.date_dim (50) -(50) Scan parquet spark_catalog.default.date_dim +(50) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q88/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q88/explain.txt index 1dface7ea..592e23cd2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q88/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q88/explain.txt @@ -173,7 +173,7 @@ +- ReusedExchange (164) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -188,7 +188,7 @@ Condition : ((isnotnull(ss_hdemo_sk#2) AND isnotnull(ss_sold_time_sk#1)) AND isn Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3] -(4) Scan parquet spark_catalog.default.household_demographics +(4) CometScan parquet spark_catalog.default.household_demographics Output [3]: [hd_demo_sk#5, hd_dep_count#6, hd_vehicle_count#7] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -216,7 +216,7 @@ Arguments: [ss_hdemo_sk#2], [hd_demo_sk#5], Inner, BuildRight Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5] Arguments: [ss_sold_time_sk#1, ss_store_sk#3], [ss_sold_time_sk#1, ss_store_sk#3] -(10) Scan parquet spark_catalog.default.time_dim +(10) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#8, t_hour#9, t_minute#10] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -244,7 +244,7 @@ Arguments: [ss_sold_time_sk#1], [t_time_sk#8], Inner, BuildRight Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#8] Arguments: [ss_store_sk#3], [ss_store_sk#3] -(16) Scan parquet spark_catalog.default.store +(16) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#11, s_store_name#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -288,7 +288,7 @@ Functions [1]: [count(1)] (25) ColumnarToRow [codegen id : 8] Input [1]: [h8_30_to_9#14] -(26) Scan parquet spark_catalog.default.store_sales +(26) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#15, ss_hdemo_sk#16, ss_store_sk#17, ss_sold_date_sk#18] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -315,7 +315,7 @@ Arguments: [ss_hdemo_sk#16], [hd_demo_sk#19], Inner, BuildRight Input [4]: [ss_sold_time_sk#15, ss_hdemo_sk#16, ss_store_sk#17, hd_demo_sk#19] Arguments: [ss_sold_time_sk#15, ss_store_sk#17], [ss_sold_time_sk#15, ss_store_sk#17] -(32) Scan parquet spark_catalog.default.time_dim +(32) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#20, t_hour#21, t_minute#22] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -379,7 +379,7 @@ Arguments: IdentityBroadcastMode, [plan_id=3] Join type: Inner Join condition: None -(47) Scan parquet spark_catalog.default.store_sales +(47) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#26, ss_hdemo_sk#27, ss_store_sk#28, ss_sold_date_sk#29] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -406,7 +406,7 @@ Arguments: [ss_hdemo_sk#27], [hd_demo_sk#30], Inner, BuildRight Input [4]: [ss_sold_time_sk#26, ss_hdemo_sk#27, ss_store_sk#28, hd_demo_sk#30] Arguments: [ss_sold_time_sk#26, ss_store_sk#28], 
[ss_sold_time_sk#26, ss_store_sk#28] -(53) Scan parquet spark_catalog.default.time_dim +(53) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#31, t_hour#32, t_minute#33] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -470,7 +470,7 @@ Arguments: IdentityBroadcastMode, [plan_id=5] Join type: Inner Join condition: None -(68) Scan parquet spark_catalog.default.store_sales +(68) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#37, ss_hdemo_sk#38, ss_store_sk#39, ss_sold_date_sk#40] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -497,7 +497,7 @@ Arguments: [ss_hdemo_sk#38], [hd_demo_sk#41], Inner, BuildRight Input [4]: [ss_sold_time_sk#37, ss_hdemo_sk#38, ss_store_sk#39, hd_demo_sk#41] Arguments: [ss_sold_time_sk#37, ss_store_sk#39], [ss_sold_time_sk#37, ss_store_sk#39] -(74) Scan parquet spark_catalog.default.time_dim +(74) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#42, t_hour#43, t_minute#44] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -561,7 +561,7 @@ Arguments: IdentityBroadcastMode, [plan_id=7] Join type: Inner Join condition: None -(89) Scan parquet spark_catalog.default.store_sales +(89) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#48, ss_hdemo_sk#49, ss_store_sk#50, ss_sold_date_sk#51] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -588,7 +588,7 @@ Arguments: [ss_hdemo_sk#49], [hd_demo_sk#52], Inner, BuildRight Input [4]: [ss_sold_time_sk#48, ss_hdemo_sk#49, ss_store_sk#50, hd_demo_sk#52] Arguments: [ss_sold_time_sk#48, ss_store_sk#50], [ss_sold_time_sk#48, ss_store_sk#50] -(95) Scan parquet spark_catalog.default.time_dim +(95) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#53, t_hour#54, t_minute#55] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -652,7 +652,7 @@ Arguments: IdentityBroadcastMode, [plan_id=9] Join type: Inner Join condition: None -(110) Scan parquet spark_catalog.default.store_sales +(110) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#59, ss_hdemo_sk#60, ss_store_sk#61, ss_sold_date_sk#62] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -679,7 +679,7 @@ Arguments: [ss_hdemo_sk#60], [hd_demo_sk#63], Inner, BuildRight Input [4]: [ss_sold_time_sk#59, ss_hdemo_sk#60, ss_store_sk#61, hd_demo_sk#63] Arguments: [ss_sold_time_sk#59, ss_store_sk#61], [ss_sold_time_sk#59, ss_store_sk#61] -(116) Scan parquet spark_catalog.default.time_dim +(116) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#64, t_hour#65, t_minute#66] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -743,7 +743,7 @@ Arguments: IdentityBroadcastMode, [plan_id=11] Join type: Inner Join condition: None -(131) Scan parquet spark_catalog.default.store_sales +(131) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#70, ss_hdemo_sk#71, ss_store_sk#72, ss_sold_date_sk#73] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -770,7 +770,7 @@ Arguments: [ss_hdemo_sk#71], [hd_demo_sk#74], Inner, BuildRight Input [4]: [ss_sold_time_sk#70, ss_hdemo_sk#71, ss_store_sk#72, hd_demo_sk#74] Arguments: [ss_sold_time_sk#70, ss_store_sk#72], [ss_sold_time_sk#70, ss_store_sk#72] -(137) Scan parquet spark_catalog.default.time_dim 
+(137) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#75, t_hour#76, t_minute#77] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -834,7 +834,7 @@ Arguments: IdentityBroadcastMode, [plan_id=13] Join type: Inner Join condition: None -(152) Scan parquet spark_catalog.default.store_sales +(152) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#81, ss_hdemo_sk#82, ss_store_sk#83, ss_sold_date_sk#84] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -861,7 +861,7 @@ Arguments: [ss_hdemo_sk#82], [hd_demo_sk#85], Inner, BuildRight Input [4]: [ss_sold_time_sk#81, ss_hdemo_sk#82, ss_store_sk#83, hd_demo_sk#85] Arguments: [ss_sold_time_sk#81, ss_store_sk#83], [ss_sold_time_sk#81, ss_store_sk#83] -(158) Scan parquet spark_catalog.default.time_dim +(158) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#86, t_hour#87, t_minute#88] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt index 07761b14b..32dc4ffa7 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt @@ -29,7 +29,7 @@ TakeOrderedAndProject (28) +- CometScan parquet spark_catalog.default.store (14) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -40,7 +40,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3] Condition : (isnotnull(ws_item_sk#1) AND isnotnull(ws_ext_discount_amt#2)) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_manufact_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -75,7 +75,7 @@ Arguments: [ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5], [ws_ext_disc (9) ColumnarToRow [codegen id : 4] Input [3]: [ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5] -(10) Scan parquet spark_catalog.default.web_sales +(10) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#7, ws_ext_discount_amt#8, ws_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -87,7 +87,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#7, ws_ext_discount_amt#8, ws_sold_date_sk#9] Condition : isnotnull(ws_item_sk#7) -(12) Scan parquet spark_catalog.default.date_dim +(12) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -195,7 +195,7 @@ BroadcastExchange (36) +- CometScan parquet spark_catalog.default.date_dim (32) -(32) Scan parquet spark_catalog.default.date_dim +(32) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#19, d_date#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt index aa92d095a..d6e9142db 100644 --- 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt @@ -23,7 +23,7 @@ TakeOrderedAndProject (22) +- CometScan parquet spark_catalog.default.reason (12) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -41,7 +41,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#3, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5], [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#3 ASC NULLS FIRST] -(5) Scan parquet spark_catalog.default.store_returns +(5) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10, sr_returned_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -73,7 +73,7 @@ Arguments: [ss_item_sk#1, ss_ticket_number#3], [sr_item_sk#7, sr_ticket_number#9 Input [9]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10] Arguments: [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, sr_return_quantity#10], [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, sr_return_quantity#10] -(12) Scan parquet spark_catalog.default.reason +(12) CometScan parquet spark_catalog.default.reason Output [2]: [r_reason_sk#12, r_reason_desc#13] Batched: true Location [not included in comparison]/{warehouse_dir}/reason] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt index c730aa557..70aaeb9b6 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt @@ -41,7 +41,7 @@ +- CometScan parquet spark_catalog.default.web_site (29) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [8]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ws_sold_date_sk#8] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -64,7 +64,7 @@ Arguments: hashpartitioning(ws_order_number#5, 5), ENSURE_REQUIREMENTS, CometNat Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_order_number#5 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.web_sales +(6) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_warehouse_sk#9, ws_order_number#10, ws_sold_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -91,7 +91,7 @@ Arguments: [ws_order_number#5], [ws_order_number#10], 
LeftSemi, NOT (ws_warehous Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(12) Scan parquet spark_catalog.default.web_returns +(12) CometScan parquet spark_catalog.default.web_returns Output [2]: [wr_order_number#12, wr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -114,7 +114,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [wr_order_number#12] Arguments: [ws_order_number#5], [wr_order_number#12], LeftAnti -(17) Scan parquet spark_catalog.default.date_dim +(17) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Arguments: [ws_ship_date_sk#1], [d_date_sk#14], Inner, BuildRight Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, d_date_sk#14] Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(23) Scan parquet spark_catalog.default.customer_address +(23) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#16, ca_state#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -170,7 +170,7 @@ Arguments: [ws_ship_addr_sk#2], [ca_address_sk#16], Inner, BuildRight Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ca_address_sk#16] Arguments: [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(29) Scan parquet spark_catalog.default.web_site +(29) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#18, web_company_name#19] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt index 0f8276eee..4963b3194 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt @@ -54,7 +54,7 @@ +- CometScan parquet spark_catalog.default.web_site (42) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ws_sold_date_sk#7] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -77,7 +77,7 @@ Arguments: hashpartitioning(ws_order_number#4, 5), ENSURE_REQUIREMENTS, CometNat Input [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], 
[ws_order_number#4 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.web_sales +(6) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_warehouse_sk#8, ws_order_number#9, ws_sold_date_sk#10] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -121,7 +121,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [ws_order_number#9] Arguments: [ws_order_number#4], [ws_order_number#9], LeftSemi -(16) Scan parquet spark_catalog.default.web_returns +(16) CometScan parquet spark_catalog.default.web_returns Output [2]: [wr_order_number#13, wr_returned_date_sk#14] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -181,7 +181,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [wr_order_number#13] Arguments: [ws_order_number#4], [wr_order_number#13], LeftSemi -(30) Scan parquet spark_catalog.default.date_dim +(30) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#19, d_date#20] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -209,7 +209,7 @@ Arguments: [ws_ship_date_sk#1], [d_date_sk#19], Inner, BuildRight Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, d_date_sk#19] Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] -(36) Scan parquet spark_catalog.default.customer_address +(36) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#21, ca_state#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -237,7 +237,7 @@ Arguments: [ws_ship_addr_sk#2], [ca_address_sk#21], Inner, BuildRight Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ca_address_sk#21] Arguments: [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] -(42) Scan parquet spark_catalog.default.web_site +(42) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#23, web_company_name#24] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q96/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q96/explain.txt index c4b2cf973..d2e63bee2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q96/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q96/explain.txt @@ -26,7 +26,7 @@ +- CometScan parquet spark_catalog.default.store (16) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -41,7 +41,7 @@ Condition : ((isnotnull(ss_hdemo_sk#2) AND isnotnull(ss_sold_time_sk#1)) AND isn Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3] -(4) Scan parquet 
spark_catalog.default.household_demographics +(4) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#5, hd_dep_count#6] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -69,7 +69,7 @@ Arguments: [ss_hdemo_sk#2], [hd_demo_sk#5], Inner, BuildRight Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5] Arguments: [ss_sold_time_sk#1, ss_store_sk#3], [ss_sold_time_sk#1, ss_store_sk#3] -(10) Scan parquet spark_catalog.default.time_dim +(10) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#7, t_hour#8, t_minute#9] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -97,7 +97,7 @@ Arguments: [ss_sold_time_sk#1], [t_time_sk#7], Inner, BuildRight Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#7] Arguments: [ss_store_sk#3], [ss_store_sk#3] -(16) Scan parquet spark_catalog.default.store +(16) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#10, s_store_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/store] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q97/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q97/explain.txt index 9576c4ef6..1be5c9b94 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q97/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q97/explain.txt @@ -26,14 +26,14 @@ +- ReusedExchange (13) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_customer_sk#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] PartitionFilters: [isnotnull(ss_sold_date_sk#3), dynamicpruningexpression(ss_sold_date_sk#3 IN dynamicpruning#4)] ReadSchema: struct -(2) Scan parquet spark_catalog.default.date_dim +(2) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -79,7 +79,7 @@ Functions: [] Input [2]: [customer_sk#7, item_sk#8] Arguments: [customer_sk#7, item_sk#8], [customer_sk#7 ASC NULLS FIRST, item_sk#8 ASC NULLS FIRST] -(12) Scan parquet spark_catalog.default.catalog_sales +(12) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#9, cs_item_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -156,7 +156,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(26) Scan parquet spark_catalog.default.date_dim +(26) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt index 7d97561f5..b750a8833 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt @@ -26,7 +26,7 @@ +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, 
ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -38,7 +38,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -62,7 +62,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -152,7 +152,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(26) Scan parquet spark_catalog.default.date_dim +(26) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q99/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q99/explain.txt index a6282e50c..f94f48c20 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q99/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q99/explain.txt @@ -29,7 +29,7 @@ TakeOrderedAndProject (28) +- CometScan parquet spark_catalog.default.date_dim (18) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [5]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_warehouse_sk#4, cs_sold_date_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -40,7 +40,7 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -104,7 +104,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -136,7 +136,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -176,7 +176,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet 
spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_county#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -208,7 +208,7 @@ Join condition: None Output [1]: [c_current_cdemo_sk#4] Input [3]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20] -(36) Scan parquet spark_catalog.default.customer_demographics +(36) CometScan parquet spark_catalog.default.customer_demographics Output [9]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_education_status#25, cd_purchase_estimate#26, cd_credit_rating#27, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -268,7 +268,7 @@ BroadcastExchange (50) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt index 0346efa54..609ab39c8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt @@ -70,7 +70,7 @@ +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -81,7 +81,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#19, c_customer_id#20, c_first_name#21, c_last_name#22, c_preferred_cust_flag#23, c_birth_country#24, c_login#25, c_email_address#26] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -159,7 +159,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan 
parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt index 7d035666a..e3c357b40 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt @@ -34,7 +34,7 @@ +- CometScan parquet spark_catalog.default.household_demographics (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [10]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10] Batched: true Location: InMemoryFileIndex [] @@ -46,7 +46,7 @@ ReadSchema: struct= 100.00) AND (ss_net_profit#9 <= 200.00)) OR ((ss_net_profit#9 >= 150.00) AND (ss_net_profit#9 <= 300.00))) OR ((ss_net_profit#9 >= 50.00) AND (ss_net_profit#9 <= 250.00)))) AND ((((ss_sales_price#6 >= 100.00) AND (ss_sales_price#6 <= 150.00)) OR ((ss_sales_price#6 >= 50.00) AND (ss_sales_price#6 <= 100.00))) OR ((ss_sales_price#6 >= 150.00) AND (ss_sales_price#6 <= 200.00)))) -(3) Scan parquet spark_catalog.default.store +(3) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -70,7 +70,7 @@ Arguments: [ss_store_sk#4], [s_store_sk#12], Inner, BuildRight Input [11]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10, s_store_sk#12] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -98,7 +98,7 @@ Arguments: [ss_addr_sk#3], [ca_address_sk#13], Inner, ((((ca_state#14 IN (TX,OH) Input [11]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10, ca_address_sk#13, ca_state#14] Arguments: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -126,7 +126,7 @@ Arguments: [ss_sold_date_sk#10], [d_date_sk#16], Inner, BuildRight Input [8]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10, d_date_sk#16] Arguments: [ss_cdemo_sk#1, 
ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8], [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8] -(20) Scan parquet spark_catalog.default.customer_demographics +(20) CometScan parquet spark_catalog.default.customer_demographics Output [3]: [cd_demo_sk#18, cd_marital_status#19, cd_education_status#20] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -150,7 +150,7 @@ Arguments: [ss_cdemo_sk#1], [cd_demo_sk#18], Inner, ((((((cd_marital_status#19 = Input [9]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_demo_sk#18, cd_marital_status#19, cd_education_status#20] Arguments: [ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_marital_status#19, cd_education_status#20], [ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_marital_status#19, cd_education_status#20] -(25) Scan parquet spark_catalog.default.household_demographics +(25) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#21, hd_dep_count#22] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -201,7 +201,7 @@ BroadcastExchange (38) +- CometScan parquet spark_catalog.default.date_dim (34) -(34) Scan parquet spark_catalog.default.date_dim +(34) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt index c7dc4b3c0..3aae3a13e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt @@ -103,7 +103,7 @@ +- ReusedExchange (88) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -115,7 +115,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -126,7 +126,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -149,7 +149,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -185,7 +185,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight 
Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -261,7 +261,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -328,7 +328,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -360,7 +360,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -410,7 +410,7 @@ Condition : (isnotnull(sales#49) AND (cast(sales#49 as decimal(32,6)) > cast(Sub Input [5]: [i_brand_id#40, i_class_id#41, i_category_id#42, sales#49, number_sales#50] Arguments: [sales#49, number_sales#50, channel#53, i_brand_id#54, i_class_id#55, i_category_id#56], [sales#49, number_sales#50, store AS channel#53, i_brand_id#40 AS i_brand_id#54, i_class_id#41 AS i_class_id#55, i_category_id#42 AS i_category_id#56] -(66) Scan parquet spark_catalog.default.catalog_sales +(66) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#57, cs_quantity#58, cs_list_price#59, cs_sold_date_sk#60] Batched: true Location: InMemoryFileIndex [] @@ -476,7 +476,7 @@ Condition : (isnotnull(sales#70) AND (cast(sales#70 as decimal(32,6)) > cast(Reu Input [5]: [i_brand_id#63, i_class_id#64, i_category_id#65, sales#70, number_sales#71] Arguments: [sales#70, number_sales#71, channel#72, i_brand_id#63, i_class_id#64, i_category_id#65], [sales#70, number_sales#71, catalog AS channel#72, i_brand_id#63, i_class_id#64, i_category_id#65] -(81) Scan parquet spark_catalog.default.web_sales +(81) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#73, ws_quantity#74, ws_list_price#75, ws_sold_date_sk#76] Batched: true Location: InMemoryFileIndex [] @@ -594,7 +594,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (112) -(103) Scan parquet spark_catalog.default.store_sales +(103) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#99, ss_list_price#100, ss_sold_date_sk#101] Batched: true 
Location: InMemoryFileIndex [] @@ -613,7 +613,7 @@ Arguments: [ss_sold_date_sk#101], [d_date_sk#103], Inner, BuildRight Input [4]: [ss_quantity#99, ss_list_price#100, ss_sold_date_sk#101, d_date_sk#103] Arguments: [quantity#104, list_price#105], [ss_quantity#99 AS quantity#104, ss_list_price#100 AS list_price#105] -(107) Scan parquet spark_catalog.default.catalog_sales +(107) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#106, cs_list_price#107, cs_sold_date_sk#108] Batched: true Location: InMemoryFileIndex [] @@ -632,7 +632,7 @@ Arguments: [cs_sold_date_sk#108], [d_date_sk#110], Inner, BuildRight Input [4]: [cs_quantity#106, cs_list_price#107, cs_sold_date_sk#108, d_date_sk#110] Arguments: [quantity#111, list_price#112], [cs_quantity#106 AS quantity#111, cs_list_price#107 AS list_price#112] -(111) Scan parquet spark_catalog.default.web_sales +(111) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#113, ws_list_price#114, ws_sold_date_sk#115] Batched: true Location: InMemoryFileIndex [] @@ -687,7 +687,7 @@ BroadcastExchange (124) +- CometScan parquet spark_catalog.default.date_dim (120) -(120) Scan parquet spark_catalog.default.date_dim +(120) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -717,7 +717,7 @@ BroadcastExchange (129) +- CometScan parquet spark_catalog.default.date_dim (125) -(125) Scan parquet spark_catalog.default.date_dim +(125) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#123] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt index 9585c3214..47feaf898 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt @@ -86,7 +86,7 @@ +- CometScan parquet spark_catalog.default.date_dim (72) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -109,7 +109,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -132,7 +132,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ 
-168,7 +168,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -244,7 +244,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -311,7 +311,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -343,7 +343,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -389,7 +389,7 @@ Functions [2]: [sum((cast(ss_quantity#2 as decimal(10,0)) * ss_list_price#3)), c Input [6]: [channel#50, i_brand_id#40, i_class_id#41, i_category_id#42, sales#51, number_sales#52] Condition : (isnotnull(sales#51) AND (cast(sales#51 as decimal(32,6)) > cast(Subquery scalar-subquery#53, [id=#54] as decimal(32,6)))) -(65) Scan parquet spark_catalog.default.store_sales +(65) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -421,7 +421,7 @@ Arguments: [ss_item_sk#55], [i_item_sk#60], Inner, BuildRight Input [8]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_item_sk#60, i_brand_id#61, i_class_id#62, i_category_id#63] Arguments: [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#61, i_class_id#62, i_category_id#63], [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#61, i_class_id#62, i_category_id#63] -(72) Scan parquet spark_catalog.default.date_dim +(72) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#64, d_week_seq#65] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -505,7 +505,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (95) -(86) Scan parquet spark_catalog.default.store_sales +(86) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#74, ss_list_price#75, ss_sold_date_sk#76] Batched: 
true Location: InMemoryFileIndex [] @@ -524,7 +524,7 @@ Arguments: [ss_sold_date_sk#76], [d_date_sk#78], Inner, BuildRight Input [4]: [ss_quantity#74, ss_list_price#75, ss_sold_date_sk#76, d_date_sk#78] Arguments: [quantity#79, list_price#80], [ss_quantity#74 AS quantity#79, ss_list_price#75 AS list_price#80] -(90) Scan parquet spark_catalog.default.catalog_sales +(90) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#81, cs_list_price#82, cs_sold_date_sk#83] Batched: true Location: InMemoryFileIndex [] @@ -543,7 +543,7 @@ Arguments: [cs_sold_date_sk#83], [d_date_sk#85], Inner, BuildRight Input [4]: [cs_quantity#81, cs_list_price#82, cs_sold_date_sk#83, d_date_sk#85] Arguments: [quantity#86, list_price#87], [cs_quantity#81 AS quantity#86, cs_list_price#82 AS list_price#87] -(94) Scan parquet spark_catalog.default.web_sales +(94) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#88, ws_list_price#89, ws_sold_date_sk#90] Batched: true Location: InMemoryFileIndex [] @@ -598,7 +598,7 @@ BroadcastExchange (107) +- CometScan parquet spark_catalog.default.date_dim (103) -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -627,7 +627,7 @@ Subquery:6 Hosting operator id = 104 Hosting Expression = Subquery scalar-subque +- CometScan parquet spark_catalog.default.date_dim (108) -(108) Scan parquet spark_catalog.default.date_dim +(108) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#98, d_year#99, d_moy#100, d_dom#101] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -653,7 +653,7 @@ BroadcastExchange (116) +- CometScan parquet spark_catalog.default.date_dim (112) -(112) Scan parquet spark_catalog.default.date_dim +(112) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#102] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -691,7 +691,7 @@ BroadcastExchange (121) +- CometScan parquet spark_catalog.default.date_dim (117) -(117) Scan parquet spark_catalog.default.date_dim +(117) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#64, d_week_seq#65] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -720,7 +720,7 @@ Subquery:13 Hosting operator id = 118 Hosting Expression = Subquery scalar-subqu +- CometScan parquet spark_catalog.default.date_dim (122) -(122) Scan parquet spark_catalog.default.date_dim +(122) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#103, d_year#104, d_moy#105, d_dom#106] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt index 842c3e576..037c52626 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt @@ -24,7 +24,7 @@ +- CometScan parquet spark_catalog.default.date_dim (13) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -36,7 +36,7 @@ ReadSchema: 
struct Input [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_bill_customer_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#5, c_current_addr_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -60,7 +60,7 @@ Arguments: [cs_bill_customer_sk#1], [c_customer_sk#5], Inner, BuildRight Input [5]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3, c_customer_sk#5, c_current_addr_sk#6] Arguments: [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6], [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#7, ca_state#8, ca_zip#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -84,7 +84,7 @@ Arguments: [c_current_addr_sk#6], [ca_address_sk#7], Inner, ((substr(ca_zip#9, 1 Input [6]: [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6, ca_address_sk#7, ca_state#8, ca_zip#9] Arguments: [cs_sales_price#2, cs_sold_date_sk#3, ca_zip#9], [cs_sales_price#2, cs_sold_date_sk#3, ca_zip#9] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#11, d_qoy#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -143,7 +143,7 @@ BroadcastExchange (28) +- CometScan parquet spark_catalog.default.date_dim (24) -(24) Scan parquet spark_catalog.default.date_dim +(24) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#11, d_qoy#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt index 0f3c217d5..acd12b277 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt @@ -41,7 +41,7 @@ +- CometScan parquet spark_catalog.default.call_center (29) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [8]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, cs_sold_date_sk#8] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -64,7 +64,7 @@ Arguments: hashpartitioning(cs_order_number#5, 5), ENSURE_REQUIREMENTS, CometNat Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] Arguments: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_order_number#5 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.catalog_sales +(6) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_warehouse_sk#9, cs_order_number#10, cs_sold_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -91,7 +91,7 @@ Arguments: [cs_order_number#5], [cs_order_number#10], LeftSemi, NOT (cs_warehous Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, 
cs_ext_ship_cost#6, cs_net_profit#7] Arguments: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(12) Scan parquet spark_catalog.default.catalog_returns +(12) CometScan parquet spark_catalog.default.catalog_returns Output [2]: [cr_order_number#12, cr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -114,7 +114,7 @@ Left output [6]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_ Right output [1]: [cr_order_number#12] Arguments: [cs_order_number#5], [cr_order_number#12], LeftAnti -(17) Scan parquet spark_catalog.default.date_dim +(17) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Arguments: [cs_ship_date_sk#1], [d_date_sk#14], Inner, BuildRight Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, d_date_sk#14] Arguments: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(23) Scan parquet spark_catalog.default.customer_address +(23) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#16, ca_state#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -170,7 +170,7 @@ Arguments: [cs_ship_addr_sk#2], [ca_address_sk#16], Inner, BuildRight Input [6]: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, ca_address_sk#16] Arguments: [cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7], [cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7] -(29) Scan parquet spark_catalog.default.call_center +(29) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#18, cc_county#19] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt index 69f720e6f..496ec2f1a 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.item (33) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_quantity#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [2]: [ws_ext_sales_price#1, ws_sold_date_sk#2] Arguments: [sold_date_sk#3, sales_price#4], [ws_sold_date_sk#2 AS sold_date_sk#3, ws_ext_sales_price#1 AS sales_price#4] -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ext_sales_price#5, cs_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [sold_date_sk#7, sales_price#8], [cs_sold_date_sk#6 AS sold_date_sk#7 Child 0 Input [2]: 
[sold_date_sk#3, sales_price#4] Child 1 Input [2]: [sold_date_sk#7, sales_price#8] -(6) Scan parquet spark_catalog.default.date_dim +(6) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_week_seq#10, d_day_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -98,7 +98,7 @@ Input [8]: [d_week_seq#10, sum#12, sum#13, sum#14, sum#15, sum#16, sum#17, sum#1 Keys [1]: [d_week_seq#10] Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_week_seq#19, d_year#20] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ Input [8]: [d_week_seq#10, sum#36, sum#37, sum#38, sum#39, sum#40, sum#41, sum#4 Keys [1]: [d_week_seq#10] Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday ) THEN sales_price#4 END)), sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))] -(22) Scan parquet spark_catalog.default.date_dim +(22) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_week_seq#43, d_year#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt index 1af8f20f3..909acbf7f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt @@ -23,7 +23,7 @@ TakeOrderedAndProject (22) +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -35,7 +35,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: 
[cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt index e432933e5..1bcad35c3 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt @@ -25,7 +25,7 @@ +- CometScan parquet spark_catalog.default.date_dim (14) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct -(2) Scan parquet spark_catalog.default.store_sales +(2) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -87,7 +87,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#7, ss_sold_date_sk#8] Condition : isnotnull(ss_item_sk#7) -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -115,7 +115,7 @@ Arguments: [ss_sold_date_sk#8], [d_date_sk#10], Inner, BuildRight Input [4]: [ss_item_sk#7, ss_sold_date_sk#8, d_date_sk#10, d_date#11] Arguments: [ss_item_sk#7, d_date#11], [ss_item_sk#7, d_date#11] -(10) Scan parquet spark_catalog.default.item +(10) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#13, i_item_desc#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -182,7 +182,7 @@ Arguments: hashpartitioning(cs_bill_customer_sk#1, 5), ENSURE_REQUIREMENTS, Come Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Arguments: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5], [cs_bill_customer_sk#1 ASC NULLS FIRST] -(25) Scan parquet spark_catalog.default.store_sales +(25) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -197,7 +197,7 @@ Condition : isnotnull(ss_customer_sk#19) Input [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Arguments: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21], [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21] -(28) Scan parquet spark_catalog.default.customer +(28) CometScan parquet spark_catalog.default.customer Output [1]: [c_customer_sk#23] Batched: true Location [not included in 
comparison]/{warehouse_dir}/customer] @@ -256,7 +256,7 @@ Arguments: [cs_bill_customer_sk#1], [c_customer_sk#23], LeftSemi Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Arguments: [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5], [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] -(41) Scan parquet spark_catalog.default.date_dim +(41) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#29, d_year#30, d_moy#31] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -284,7 +284,7 @@ Arguments: [cs_sold_date_sk#5], [d_date_sk#29], Inner, BuildRight Input [4]: [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, d_date_sk#29] Arguments: [sales#32], [(cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4) AS sales#32] -(47) Scan parquet spark_catalog.default.web_sales +(47) CometScan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#33, ws_bill_customer_sk#34, ws_quantity#35, ws_list_price#36, ws_sold_date_sk#37] Batched: true Location: InMemoryFileIndex [] @@ -383,7 +383,7 @@ BroadcastExchange (72) +- CometScan parquet spark_catalog.default.date_dim (68) -(68) Scan parquet spark_catalog.default.date_dim +(68) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#29, d_year#30, d_moy#31] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -413,7 +413,7 @@ BroadcastExchange (77) +- CometScan parquet spark_catalog.default.date_dim (73) -(73) Scan parquet spark_catalog.default.date_dim +(73) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -456,7 +456,7 @@ Subquery:3 Hosting operator id = 36 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (83) -(78) Scan parquet spark_catalog.default.store_sales +(78) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#44, ss_quantity#45, ss_sales_price#46, ss_sold_date_sk#47] Batched: true Location: InMemoryFileIndex [] @@ -480,7 +480,7 @@ Arguments: [ss_customer_sk#44], [c_customer_sk#49], Inner, BuildRight Input [5]: [ss_customer_sk#44, ss_quantity#45, ss_sales_price#46, ss_sold_date_sk#47, c_customer_sk#49] Arguments: [ss_quantity#45, ss_sales_price#46, ss_sold_date_sk#47, c_customer_sk#49], [ss_quantity#45, ss_sales_price#46, ss_sold_date_sk#47, c_customer_sk#49] -(83) Scan parquet spark_catalog.default.date_dim +(83) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#50, d_year#51] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -547,7 +547,7 @@ BroadcastExchange (100) +- CometScan parquet spark_catalog.default.date_dim (96) -(96) Scan parquet spark_catalog.default.date_dim +(96) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#50, d_year#51] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23b/explain.txt index cd97d73b1..4a4b3a41c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23b/explain.txt @@ -88,7 +88,7 @@ +- ReusedExchange (79) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet 
spark_catalog.default.catalog_sales Output [5]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -100,7 +100,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#7, ss_sold_date_sk#8] Condition : isnotnull(ss_item_sk#7) -(5) Scan parquet spark_catalog.default.date_dim +(5) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -140,7 +140,7 @@ Arguments: [ss_sold_date_sk#8], [d_date_sk#10], Inner, BuildRight Input [4]: [ss_item_sk#7, ss_sold_date_sk#8, d_date_sk#10, d_date#11] Arguments: [ss_item_sk#7, d_date#11], [ss_item_sk#7, d_date#11] -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#13, i_item_desc#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -207,7 +207,7 @@ Arguments: hashpartitioning(cs_bill_customer_sk#1, 5), ENSURE_REQUIREMENTS, Come Input [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5] Arguments: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5], [cs_bill_customer_sk#1 ASC NULLS FIRST] -(26) Scan parquet spark_catalog.default.store_sales +(26) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -222,7 +222,7 @@ Condition : isnotnull(ss_customer_sk#19) Input [4]: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21, ss_sold_date_sk#22] Arguments: [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21], [ss_customer_sk#19, ss_quantity#20, ss_sales_price#21] -(29) Scan parquet spark_catalog.default.customer +(29) CometScan parquet spark_catalog.default.customer Output [1]: [c_customer_sk#23] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -277,7 +277,7 @@ Left output [4]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold Right output [1]: [c_customer_sk#23] Arguments: [cs_bill_customer_sk#1], [c_customer_sk#23], LeftSemi -(41) Scan parquet spark_catalog.default.customer +(41) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#29, c_first_name#30, c_last_name#31] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -334,7 +334,7 @@ Arguments: [cs_bill_customer_sk#1], [c_customer_sk#29], Inner, BuildRight Input [7]: [cs_bill_customer_sk#1, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, c_customer_sk#29, c_first_name#30, c_last_name#31] Arguments: [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, c_first_name#30, c_last_name#31], [cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5, c_first_name#30, c_last_name#31] -(54) Scan parquet spark_catalog.default.date_dim +(54) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#32, d_year#33, d_moy#34] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -376,7 +376,7 @@ Input [4]: [c_last_name#31, c_first_name#30, sum#35, isEmpty#36] Keys [2]: [c_last_name#31, c_first_name#30] Functions [1]: [sum((cast(cs_quantity#3 as decimal(10,0)) * cs_list_price#4))] -(63) Scan parquet spark_catalog.default.web_sales +(63) CometScan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#37, ws_bill_customer_sk#38, ws_quantity#39, ws_list_price#40, 
ws_sold_date_sk#41] Batched: true Location: InMemoryFileIndex [] @@ -492,7 +492,7 @@ BroadcastExchange (92) +- CometScan parquet spark_catalog.default.date_dim (88) -(88) Scan parquet spark_catalog.default.date_dim +(88) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#32, d_year#33, d_moy#34] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -522,7 +522,7 @@ BroadcastExchange (97) +- CometScan parquet spark_catalog.default.date_dim (93) -(93) Scan parquet spark_catalog.default.date_dim +(93) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_date#11, d_year#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -565,7 +565,7 @@ Subquery:3 Hosting operator id = 37 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (103) -(98) Scan parquet spark_catalog.default.store_sales +(98) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_customer_sk#51, ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54] Batched: true Location: InMemoryFileIndex [] @@ -589,7 +589,7 @@ Arguments: [ss_customer_sk#51], [c_customer_sk#56], Inner, BuildRight Input [5]: [ss_customer_sk#51, ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54, c_customer_sk#56] Arguments: [ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54, c_customer_sk#56], [ss_quantity#52, ss_sales_price#53, ss_sold_date_sk#54, c_customer_sk#56] -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#57, d_year#58] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -656,7 +656,7 @@ BroadcastExchange (120) +- CometScan parquet spark_catalog.default.date_dim (116) -(116) Scan parquet spark_catalog.default.date_dim +(116) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#57, d_year#58] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24a/explain.txt index d643cf7a3..2208ce266 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24a/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.customer_address (30) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -66,7 +66,7 @@ Arguments: hashpartitioning(ss_ticket_number#4, ss_item_sk#1, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5], [ss_ticket_number#4 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.store_returns +(6) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#7, sr_ticket_number#8, sr_returned_date_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -98,7 +98,7 @@ Arguments: [ss_ticket_number#4, ss_item_sk#1], [sr_ticket_number#8, sr_item_sk#7 Input [7]: 
[ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, sr_item_sk#7, sr_ticket_number#8] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5], [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [5]: [s_store_sk#10, s_store_name#11, s_market_id#12, s_state#13, s_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -126,7 +126,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -150,7 +150,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#15], Inner, BuildRight Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Arguments: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20], [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] -(24) Scan parquet spark_catalog.default.customer +(24) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#21, c_first_name#22, c_last_name#23, c_birth_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -177,7 +177,7 @@ Arguments: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_pric (29) ColumnarToRow [codegen id : 2] Input [12]: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20, c_first_name#22, c_last_name#23, c_birth_country#24] -(30) Scan parquet spark_catalog.default.customer_address +(30) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_state#25, ca_zip#26, ca_country#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -312,7 +312,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(52) Scan parquet spark_catalog.default.item +(52) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24b/explain.txt index 2bc198436..c3b956643 100644 --- 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q24b/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.customer_address (30) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -66,7 +66,7 @@ Arguments: hashpartitioning(ss_ticket_number#4, ss_item_sk#1, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5], [ss_ticket_number#4 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.store_returns +(6) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#7, sr_ticket_number#8, sr_returned_date_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -98,7 +98,7 @@ Arguments: [ss_ticket_number#4, ss_item_sk#1], [sr_ticket_number#8, sr_item_sk#7 Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, sr_item_sk#7, sr_ticket_number#8] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5], [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [5]: [s_store_sk#10, s_store_name#11, s_market_id#12, s_state#13, s_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -126,7 +126,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -150,7 +150,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#15], Inner, BuildRight Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Arguments: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20], [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] -(24) Scan parquet spark_catalog.default.customer +(24) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#21, c_first_name#22, c_last_name#23, c_birth_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -177,7 +177,7 @@ Arguments: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_pric (29) ColumnarToRow [codegen id : 2] Input [12]: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, 
i_units#19, i_manager_id#20, c_first_name#22, c_last_name#23, c_birth_country#24] -(30) Scan parquet spark_catalog.default.customer_address +(30) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_state#25, ca_zip#26, ca_country#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -312,7 +312,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#10], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14], [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] -(52) Scan parquet spark_catalog.default.item +(52) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q25/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q25/explain.txt index 0d47bdcf1..6c7833548 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q25/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q25/explain.txt @@ -43,7 +43,7 @@ +- CometScan parquet spark_catalog.default.item (33) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_profit#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_addr_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -127,7 +127,7 @@ Arguments: [ss_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [6]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, d_date_sk#5, d_year#6, d_qoy#7] Arguments: [ss_addr_sk#1, ss_ext_sales_price#2, d_year#6, d_qoy#7], [ss_addr_sk#1, ss_ext_sales_price#2, d_year#6, d_qoy#7] -(8) Scan parquet spark_catalog.default.customer_address +(8) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#8, ca_county#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -165,7 +165,7 @@ Input [4]: [ca_county#9, d_qoy#7, d_year#6, sum#10] Keys [3]: [ca_county#9, d_qoy#7, d_year#6] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#2))] -(16) Scan parquet spark_catalog.default.store_sales +(16) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#11, ss_ext_sales_price#12, ss_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -177,7 +177,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#11, ss_ext_sales_price#12, ss_sold_date_sk#13] Condition : isnotnull(ss_addr_sk#11) -(18) Scan parquet spark_catalog.default.date_dim +(18) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_year#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -236,7 +236,7 @@ Left output [3]: [ca_county#9, d_year#6, store_sales#22] Right output [2]: [ca_county#19, 
store_sales#21] Arguments: [ca_county#9], [ca_county#19], Inner, BuildRight -(31) Scan parquet spark_catalog.default.store_sales +(31) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#23, ss_ext_sales_price#24, ss_sold_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -248,7 +248,7 @@ ReadSchema: struct Input [3]: [ss_addr_sk#23, ss_ext_sales_price#24, ss_sold_date_sk#25] Condition : isnotnull(ss_addr_sk#23) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#27, d_year#28, d_qoy#29] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -311,7 +311,7 @@ Arguments: [ca_county#19], [ca_county#31], Inner, BuildRight Input [7]: [ca_county#9, d_year#6, store_sales#22, ca_county#19, store_sales#21, ca_county#31, store_sales#33] Arguments: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33], [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33] -(47) Scan parquet spark_catalog.default.web_sales +(47) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#34, ws_ext_sales_price#35, ws_sold_date_sk#36] Batched: true Location: InMemoryFileIndex [] @@ -370,7 +370,7 @@ Left output [5]: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_s Right output [2]: [ca_county#42, web_sales#44] Arguments: [ca_county#9], [ca_county#42], Inner, BuildRight -(60) Scan parquet spark_catalog.default.web_sales +(60) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#45, ws_ext_sales_price#46, ws_sold_date_sk#47] Batched: true Location: InMemoryFileIndex [] @@ -433,7 +433,7 @@ Arguments: [ca_county#42], [ca_county#53], Inner, (CASE WHEN (web_sales#44 > 0.0 Input [9]: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33, ca_county#42, web_sales#44, ca_county#53, web_sales#55] Arguments: [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33, ca_county#42, web_sales#44, web_sales#55], [ca_county#9, d_year#6, store_sales#22, store_sales#21, store_sales#33, ca_county#42, web_sales#44, web_sales#55] -(74) Scan parquet spark_catalog.default.web_sales +(74) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#56, ws_ext_sales_price#57, ws_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -516,7 +516,7 @@ BroadcastExchange (94) +- CometScan parquet spark_catalog.default.date_dim (91) -(91) Scan parquet spark_catalog.default.date_dim +(91) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -541,7 +541,7 @@ BroadcastExchange (98) +- CometScan parquet spark_catalog.default.date_dim (95) -(95) Scan parquet spark_catalog.default.date_dim +(95) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_year#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -566,7 +566,7 @@ BroadcastExchange (102) +- CometScan parquet spark_catalog.default.date_dim (99) -(99) Scan parquet spark_catalog.default.date_dim +(99) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#27, d_year#28, d_qoy#29] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt index a0b604678..03edc9859 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt @@ -31,7 +31,7 @@ +- ReusedExchange (24) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -43,7 +43,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] Condition : (isnotnull(cs_item_sk#1) AND isnotnull(cs_ext_discount_amt#2)) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_manufact_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -71,7 +71,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [4]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5] Arguments: [cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5], [cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5] -(9) Scan parquet spark_catalog.default.catalog_sales +(9) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#7, cs_ext_discount_amt#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -83,7 +83,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#7, cs_ext_discount_amt#8, cs_sold_date_sk#9] Condition : isnotnull(cs_item_sk#7) -(11) Scan parquet spark_catalog.default.date_dim +(11) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -181,7 +181,7 @@ BroadcastExchange (35) +- CometScan parquet spark_catalog.default.date_dim (31) -(31) Scan parquet spark_catalog.default.date_dim +(31) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_date#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt index a432ecd29..7a50fe69e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt @@ -62,7 +62,7 @@ +- ReusedExchange (50) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -74,7 +74,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_manufact_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_category#13, i_manufact_id#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -192,7 +192,7 @@ Input [2]: [i_manufact_id#12, sum#15] Keys [1]: [i_manufact_id#12] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] -(28) Scan parquet spark_catalog.default.catalog_sales +(28) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#16, cs_item_sk#17, cs_ext_sales_price#18, cs_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -254,7 
+254,7 @@ Input [2]: [i_manufact_id#24, sum#25] Keys [1]: [i_manufact_id#24] Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#18))] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#26, ws_bill_addr_sk#27, ws_ext_sales_price#28, ws_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -352,7 +352,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt index 7cfddd6a7..3223f7c72 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -45,7 +45,7 @@ ReadSchema: struct= 15) AND (cnt#17 <= 20)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt index c06c1dd16..c1e19555c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt @@ -45,7 +45,7 @@ TakeOrderedAndProject (44) +- CometScan parquet spark_catalog.default.customer_demographics (35) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -56,14 +56,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 
@@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -175,7 +175,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#21] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20, ca_state#21] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt index 68397f3a2..5bd9122ff 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt @@ -30,7 +30,7 @@ TakeOrderedAndProject (29) +- CometScan parquet spark_catalog.default.store (14) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -42,7 +42,7 @@ ReadSchema: struct= 68.00)) A Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(4) Scan parquet spark_catalog.default.inventory +(4) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -71,7 +71,7 @@ Arguments: [i_item_sk#1], [inv_item_sk#6], Inner, BuildRight Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item_sk#6, inv_date_sk#8] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item Input [4]: [i_item_sk#1, 
i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(17) Scan parquet spark_catalog.default.catalog_sales +(17) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_item_sk#12, cs_sold_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -158,7 +158,7 @@ BroadcastExchange (31) +- CometScan parquet spark_catalog.default.date_dim (27) -(27) Scan parquet spark_catalog.default.date_dim +(27) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt index 598dccaf0..c0e7300df 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt @@ -48,7 +48,7 @@ +- ReusedExchange (35) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ ReadSchema: struct Input [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Condition : isnotnull(ss_customer_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -88,7 +88,7 @@ Arguments: [ss_sold_date_sk#2], [d_date_sk#4], Inner, BuildRight Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5] Arguments: [ss_customer_sk#1, d_date#5], [ss_customer_sk#1, d_date#5] -(9) Scan parquet spark_catalog.default.customer +(9) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -126,7 +126,7 @@ Input [3]: [c_last_name#9, c_first_name#8, d_date#5] Keys [3]: [c_last_name#9, c_first_name#8, d_date#5] Functions: [] -(17) Scan parquet spark_catalog.default.catalog_sales +(17) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_bill_customer_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -185,7 +185,7 @@ Left output [3]: [c_last_name#9, c_first_name#8, d_date#5] Right output [3]: [c_last_name#17, c_first_name#16, d_date#14] Arguments: [coalesce(c_last_name#9, ), isnull(c_last_name#9), coalesce(c_first_name#8, ), isnull(c_first_name#8), coalesce(d_date#5, 1970-01-01), isnull(d_date#5)], [coalesce(c_last_name#17, ), isnull(c_last_name#17), coalesce(c_first_name#16, ), isnull(c_first_name#16), coalesce(d_date#14, 1970-01-01), isnull(d_date#14)], LeftSemi, BuildRight -(30) Scan parquet spark_catalog.default.web_sales +(30) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#18, ws_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -274,7 +274,7 @@ BroadcastExchange (52) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt index 96e213fff..ef9e118a8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt @@ -48,7 +48,7 @@ +- CometScan parquet spark_catalog.default.date_dim (32) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ ReadSchema: struct 0.000000)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#22, c_customer_id#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26, c_birth_country#27, c_login#28, c_email_address#29] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -191,7 +191,7 @@ ReadSchema: struct= 738)) AND (i_m Input [3]: [i_manufact_id#1, i_manufact#2, i_product_name#3] Arguments: [i_manufact#2, i_product_name#3], [i_manufact#2, i_product_name#3] -(4) Scan parquet spark_catalog.default.item +(4) CometScan parquet spark_catalog.default.item Output [5]: [i_category#4, i_manufact#5, i_size#6, i_color#7, i_units#8] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt index c2c2c8bf3..b58d82c78 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt @@ -20,7 +20,7 @@ +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1, d_year#2], [d_date_sk#1, d_year#2] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_year#2, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [d_year#2, ss_item_sk#4, ss_ext_sales_price#5], [d_year#2, ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_category_id#8, i_category#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt index d6cb50a4f..2953a5f8f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt @@ -20,7 
+20,7 @@ +- CometScan parquet spark_catalog.default.store (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_day_name#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((isnotnull(d_year#2) AND (d_year#2 = 2000)) AND isnotnull(d_date_sk Input [3]: [d_date_sk#1, d_year#2, d_day_name#3] Arguments: [d_date_sk#1, d_day_name#3], [d_date_sk#1, d_day_name#3] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_day_name#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Arguments: [d_day_name#3, ss_store_sk#4, ss_sales_price#5], [d_day_name#3, ss_store_sk#4, ss_sales_price#5] -(9) Scan parquet spark_catalog.default.store +(9) CometScan parquet spark_catalog.default.store Output [4]: [s_store_sk#7, s_store_id#8, s_store_name#9, s_gmt_offset#10] Batched: true Location [not included in comparison]/{warehouse_dir}/store] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt index da94ff319..6e85991e4 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt @@ -34,7 +34,7 @@ TakeOrderedAndProject (33) +- ReusedExchange (30) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_store_sk#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -130,7 +130,7 @@ Join condition: None Output [3]: [item_sk#7, rnk#11, item_sk#12] Input [4]: [item_sk#7, rnk#11, item_sk#12, rnk#14] -(24) Scan parquet spark_catalog.default.item +(24) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#15, i_product_name#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -187,7 +187,7 @@ Subquery:1 Hosting operator id = 7 Hosting Expression = Subquery scalar-subquery +- CometScan parquet spark_catalog.default.store_sales (34) -(34) Scan parquet spark_catalog.default.store_sales +(34) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_addr_sk#21, ss_store_sk#22, ss_net_profit#23, ss_sold_date_sk#24] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt index f128499e3..962a51203 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt @@ -37,7 +37,7 @@ TakeOrderedAndProject (36) +- CometScan parquet spark_catalog.default.item (25) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#2, ws_bill_customer_sk#3, ws_sales_price#4, ws_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -49,7 +49,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, 
i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#4], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt index 40181264b..70c7dc75f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt @@ -29,7 +29,7 @@ +- CometScan parquet spark_catalog.default.date_dim (19) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [7]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -41,7 +41,7 @@ ReadSchema: struct= 100.00) AND (ss_sales_price#5 <= 150.00)) OR ((ss_sales_price#5 >= 50.00) AND (ss_sales_price#5 <= 100.00))) OR ((ss_sales_price#5 >= 150.00) AND (ss_sales_price#5 <= 200.00)))) AND ((((ss_net_profit#6 >= 0.00) AND (ss_net_profit#6 <= 2000.00)) OR ((ss_net_profit#6 >= 150.00) AND (ss_net_profit#6 <= 3000.00))) OR ((ss_net_profit#6 >= 50.00) AND (ss_net_profit#6 <= 25000.00)))) -(3) Scan parquet spark_catalog.default.store +(3) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -65,7 +65,7 @@ Arguments: [ss_store_sk#3], [s_store_sk#9], Inner, BuildRight Input [8]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, s_store_sk#9] Arguments: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, 
ss_net_profit#6, ss_sold_date_sk#7], [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.customer_demographics +(8) CometScan parquet spark_catalog.default.customer_demographics Output [3]: [cd_demo_sk#10, cd_marital_status#11, cd_education_status#12] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -89,7 +89,7 @@ Arguments: [ss_cdemo_sk#1], [cd_demo_sk#10], Inner, ((((((cd_marital_status#11 = Input [9]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, cd_demo_sk#10, cd_marital_status#11, cd_education_status#12] Arguments: [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7], [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.customer_address +(13) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -117,7 +117,7 @@ Arguments: [ss_addr_sk#2], [ca_address_sk#13], Inner, ((((ca_state#14 IN (CO,OH, Input [6]: [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7, ca_address_sk#13, ca_state#14] Arguments: [ss_quantity#4, ss_sold_date_sk#7], [ss_quantity#4, ss_sold_date_sk#7] -(19) Scan parquet spark_catalog.default.date_dim +(19) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -172,7 +172,7 @@ BroadcastExchange (33) +- CometScan parquet spark_catalog.default.date_dim (29) -(29) Scan parquet spark_catalog.default.date_dim +(29) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt index f7e750d47..2def4544c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt @@ -78,7 +78,7 @@ TakeOrderedAndProject (77) +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] -(5) Scan parquet spark_catalog.default.web_returns +(5) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -122,7 +122,7 @@ Arguments: [ws_order_number#2, ws_item_sk#1], [wr_order_number#9, wr_item_sk#8], Input [9]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11] Arguments: [ws_item_sk#1, 
ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11], [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -195,7 +195,7 @@ Condition : ((return_rank#25 <= 10) OR (currency_rank#26 <= 10)) Output [5]: [web AS channel#27, item#22, return_ratio#23, return_rank#25, currency_rank#26] Input [5]: [item#22, return_ratio#23, currency_ratio#24, return_rank#25, currency_rank#26] -(27) Scan parquet spark_catalog.default.catalog_sales +(27) CometScan parquet spark_catalog.default.catalog_sales Output [6]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_net_profit#32, cs_sold_date_sk#33] Batched: true Location: InMemoryFileIndex [] @@ -215,7 +215,7 @@ Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, c Input [5]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] -(31) Scan parquet spark_catalog.default.catalog_returns +(31) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#35, cr_order_number#36, cr_return_quantity#37, cr_return_amount#38, cr_returned_date_sk#39] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -296,7 +296,7 @@ Condition : ((return_rank#50 <= 10) OR (currency_rank#51 <= 10)) Output [5]: [catalog AS channel#52, item#47, return_ratio#48, return_rank#50, currency_rank#51] Input [5]: [item#47, return_ratio#48, currency_ratio#49, return_rank#50, currency_rank#51] -(50) Scan parquet spark_catalog.default.store_sales +(50) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_net_profit#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -316,7 +316,7 @@ Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, Input [5]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] -(54) Scan parquet spark_catalog.default.store_returns +(54) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#60, sr_ticket_number#61, sr_return_quantity#62, sr_return_amt#63, sr_returned_date_sk#64] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -431,7 +431,7 @@ BroadcastExchange (82) +- CometScan parquet spark_catalog.default.date_dim (78) -(78) Scan parquet spark_catalog.default.date_dim +(78) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt index 82b5c6690..02ad2f357 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt @@ -71,7 +71,7 @@ TakeOrderedAndProject (70) +- CometScan parquet 
spark_catalog.default.web_site (56) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -87,7 +87,7 @@ Condition : isnotnull(ss_store_sk#1) Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -107,7 +107,7 @@ Arguments: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, n Child 0 Input [6]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11] Child 1 Input [6]: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, net_loss#21] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -135,7 +135,7 @@ Arguments: [date_sk#7], [d_date_sk#22], Inner, BuildRight Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22] Arguments: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11], [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11] -(14) Scan parquet spark_catalog.default.store +(14) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#24, s_store_id#25] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -178,7 +178,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt# Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#8))#30, sum(UnscaledValue(return_amt#10))#31, sum(UnscaledValue(profit#9))#32, sum(UnscaledValue(net_loss#11))#33] Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#8))#30,17,2) AS sales#34, MakeDecimal(sum(UnscaledValue(return_amt#10))#31,17,2) AS returns#35, (MakeDecimal(sum(UnscaledValue(profit#9))#32,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#33,17,2)) AS profit#36, store channel AS channel#37, concat(store, s_store_id#25) AS id#38] -(23) Scan parquet spark_catalog.default.catalog_sales +(23) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Batched: true Location: InMemoryFileIndex [] @@ -194,7 +194,7 @@ Condition : isnotnull(cs_catalog_page_sk#39) Input [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Arguments: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49], [cs_catalog_page_sk#39 AS page_sk#44, cs_sold_date_sk#42 AS date_sk#45, cs_ext_sales_price#40 AS sales_price#46, cs_net_profit#41 AS profit#47, 0.00 AS return_amt#48, 0.00 AS net_loss#49] -(26) Scan parquet spark_catalog.default.catalog_returns +(26) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_catalog_page_sk#50, cr_return_amount#51, cr_net_loss#52, cr_returned_date_sk#53] Batched: true Location: InMemoryFileIndex [] 
@@ -226,7 +226,7 @@ Arguments: [date_sk#45], [d_date_sk#60], Inner, BuildRight Input [7]: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49, d_date_sk#60] Arguments: [page_sk#44, sales_price#46, profit#47, return_amt#48, net_loss#49], [page_sk#44, sales_price#46, profit#47, return_amt#48, net_loss#49] -(33) Scan parquet spark_catalog.default.catalog_page +(33) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#61, cp_catalog_page_id#62] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -269,7 +269,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#46)), sum(UnscaledValue(return_amt Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#46))#67, sum(UnscaledValue(return_amt#48))#68, sum(UnscaledValue(profit#47))#69, sum(UnscaledValue(net_loss#49))#70] Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#46))#67,17,2) AS sales#71, MakeDecimal(sum(UnscaledValue(return_amt#48))#68,17,2) AS returns#72, (MakeDecimal(sum(UnscaledValue(profit#47))#69,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#49))#70,17,2)) AS profit#73, catalog channel AS channel#74, concat(catalog_page, cp_catalog_page_id#62) AS id#75] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Batched: true Location: InMemoryFileIndex [] @@ -285,7 +285,7 @@ Condition : isnotnull(ws_web_site_sk#76) Input [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Arguments: [wsr_web_site_sk#81, date_sk#82, sales_price#83, profit#84, return_amt#85, net_loss#86], [ws_web_site_sk#76 AS wsr_web_site_sk#81, ws_sold_date_sk#79 AS date_sk#82, ws_ext_sales_price#77 AS sales_price#83, ws_net_profit#78 AS profit#84, 0.00 AS return_amt#85, 0.00 AS net_loss#86] -(45) Scan parquet spark_catalog.default.web_returns +(45) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#87, wr_order_number#88, wr_return_amt#89, wr_net_loss#90, wr_returned_date_sk#91] Batched: true Location: InMemoryFileIndex [] @@ -296,7 +296,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -122,7 +122,7 @@ Arguments: hashpartitioning(item_sk#9, d_date#6, 5), ENSURE_REQUIREMENTS, [plan_ Input [3]: [item_sk#9, d_date#6, cume_sales#11] Arguments: [item_sk#9 ASC NULLS FIRST, d_date#6 ASC NULLS FIRST], false, 0 -(19) Scan parquet spark_catalog.default.store_sales +(19) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#12, ss_sales_price#13, ss_sold_date_sk#14] Batched: true Location: InMemoryFileIndex [] @@ -227,7 +227,7 @@ BroadcastExchange (45) +- CometScan parquet spark_catalog.default.date_dim (41) -(41) Scan parquet spark_catalog.default.date_dim +(41) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q52/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q52/explain.txt index eee36d229..42b974e53 100644 --- 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q52/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q52/explain.txt @@ -20,7 +20,7 @@ +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1, d_year#2], [d_date_sk#1, d_year#2] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [5]: [d_date_sk#1, d_year#2, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [d_year#2, ss_item_sk#4, ss_ext_sales_price#5], [d_year#2, ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q53/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q53/explain.txt index 8156e275b..9b71fa400 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q53/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q53/explain.txt @@ -30,7 +30,7 @@ TakeOrderedAndProject (29) +- CometScan parquet spark_catalog.default.store (15) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4, i_manufact_id#5] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -45,7 +45,7 @@ Condition : ((((i_category#4 IN (Books Input [5]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_manufact_id#5], [i_item_sk#1, i_manufact_id#5] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#10, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -70,7 +70,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#10], Inner, BuildRight Input [6]: [i_item_sk#1, i_manufact_id#5, ss_item_sk#10, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] Arguments: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13], [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] -(9) Scan parquet spark_catalog.default.date_dim +(9) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_month_seq#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -98,7 +98,7 @@ Arguments: [ss_sold_date_sk#13], [d_date_sk#15], Inner, BuildRight Input [6]: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13, d_date_sk#15, d_qoy#17] Arguments: [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, d_qoy#17], [i_manufact_id#5, ss_store_sk#11, ss_sales_price#12, d_qoy#17] -(15) Scan parquet spark_catalog.default.store 
+(15) CometScan parquet spark_catalog.default.store Output [1]: [s_store_sk#18] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -173,7 +173,7 @@ BroadcastExchange (34) +- CometScan parquet spark_catalog.default.date_dim (30) -(30) Scan parquet spark_catalog.default.date_dim +(30) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_month_seq#16, d_qoy#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q54/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q54/explain.txt index d5bd8e387..73422b292 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q54/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q54/explain.txt @@ -57,7 +57,7 @@ +- CometScan parquet spark_catalog.default.date_dim (43) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -73,7 +73,7 @@ Condition : (isnotnull(cs_item_sk#2) AND isnotnull(cs_bill_customer_sk#1)) Input [3]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_sold_date_sk#3] Arguments: [sold_date_sk#5, customer_sk#6, item_sk#7], [cs_sold_date_sk#3 AS sold_date_sk#5, cs_bill_customer_sk#1 AS customer_sk#6, cs_item_sk#2 AS item_sk#7] -(4) Scan parquet spark_catalog.default.web_sales +(4) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#8, ws_bill_customer_sk#9, ws_sold_date_sk#10] Batched: true Location: InMemoryFileIndex [] @@ -93,7 +93,7 @@ Arguments: [sold_date_sk#11, customer_sk#12, item_sk#13], [ws_sold_date_sk#10 AS Child 0 Input [3]: [sold_date_sk#5, customer_sk#6, item_sk#7] Child 1 Input [3]: [sold_date_sk#11, customer_sk#12, item_sk#13] -(8) Scan parquet spark_catalog.default.item +(8) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#14, i_class#15, i_category#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -121,7 +121,7 @@ Arguments: [item_sk#7], [i_item_sk#14], Inner, BuildRight Input [4]: [sold_date_sk#5, customer_sk#6, item_sk#7, i_item_sk#14] Arguments: [sold_date_sk#5, customer_sk#6], [sold_date_sk#5, customer_sk#6] -(14) Scan parquet spark_catalog.default.date_dim +(14) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#17, d_year#18, d_moy#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -149,7 +149,7 @@ Arguments: [sold_date_sk#5], [d_date_sk#17], Inner, BuildRight Input [3]: [sold_date_sk#5, customer_sk#6, d_date_sk#17] Arguments: [customer_sk#6], [customer_sk#6] -(20) Scan parquet spark_catalog.default.customer +(20) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#20, c_current_addr_sk#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -187,7 +187,7 @@ Input [2]: [c_customer_sk#20, c_current_addr_sk#21] Keys [2]: [c_customer_sk#20, c_current_addr_sk#21] Functions: [] -(28) Scan parquet spark_catalog.default.store_sales +(28) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_customer_sk#22, ss_ext_sales_price#23, ss_sold_date_sk#24] Batched: true Location: InMemoryFileIndex [] @@ -212,7 +212,7 @@ Arguments: [c_customer_sk#20], [ss_customer_sk#22], Inner, BuildRight Input [5]: [c_customer_sk#20, c_current_addr_sk#21, 
ss_customer_sk#22, ss_ext_sales_price#23, ss_sold_date_sk#24] Arguments: [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24], [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24] -(33) Scan parquet spark_catalog.default.customer_address +(33) CometScan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#26, ca_county#27, ca_state#28] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -236,7 +236,7 @@ Arguments: [c_current_addr_sk#21], [ca_address_sk#26], Inner, BuildRight Input [7]: [c_customer_sk#20, c_current_addr_sk#21, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_address_sk#26, ca_county#27, ca_state#28] Arguments: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28], [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28] -(38) Scan parquet spark_catalog.default.store +(38) CometScan parquet spark_catalog.default.store Output [2]: [s_county#29, s_state#30] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -260,7 +260,7 @@ Arguments: [ca_county#27, ca_state#28], [s_county#29, s_state#30], Inner, BuildR Input [7]: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24, ca_county#27, ca_state#28, s_county#29, s_state#30] Arguments: [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24], [c_customer_sk#20, ss_ext_sales_price#23, ss_sold_date_sk#24] -(43) Scan parquet spark_catalog.default.date_dim +(43) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#31, d_month_seq#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -333,7 +333,7 @@ BroadcastExchange (61) +- CometScan parquet spark_catalog.default.date_dim (57) -(57) Scan parquet spark_catalog.default.date_dim +(57) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#17, d_year#18, d_moy#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -365,7 +365,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#31, d_month_seq#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -397,7 +397,7 @@ Subquery:4 Hosting operator id = 63 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (67) -(67) Scan parquet spark_catalog.default.date_dim +(67) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#42, d_year#43, d_moy#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -439,7 +439,7 @@ Subquery:5 Hosting operator id = 63 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (74) -(74) Scan parquet spark_catalog.default.date_dim +(74) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#46, d_year#47, d_moy#48] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q55/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q55/explain.txt index 6eff9f52d..46240a3c0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q55/explain.txt +++ 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q55/explain.txt @@ -20,7 +20,7 @@ +- CometScan parquet spark_catalog.default.item (9) -(1) Scan parquet spark_catalog.default.date_dim +(1) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#1, d_year#2, d_moy#3] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -35,7 +35,7 @@ Condition : ((((isnotnull(d_moy#3) AND isnotnull(d_year#2)) AND (d_moy#3 = 11)) Input [3]: [d_date_sk#1, d_year#2, d_moy#3] Arguments: [d_date_sk#1], [d_date_sk#1] -(4) Scan parquet spark_catalog.default.store_sales +(4) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Arguments: [d_date_sk#1], [ss_sold_date_sk#6], Inner, BuildRight Input [4]: [d_date_sk#1, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Arguments: [ss_item_sk#4, ss_ext_sales_price#5], [ss_item_sk#4, ss_ext_sales_price#5] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q56/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q56/explain.txt index d9af63246..c56eb8287 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q56/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q56/explain.txt @@ -62,7 +62,7 @@ +- ReusedExchange (50) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -74,7 +74,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_item_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_item_id#13, i_color#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -192,7 +192,7 @@ Input [2]: [i_item_id#12, sum#15] Keys [1]: [i_item_id#12] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] -(28) Scan parquet spark_catalog.default.catalog_sales +(28) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#16, cs_item_sk#17, cs_ext_sales_price#18, cs_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -254,7 +254,7 @@ Input [2]: [i_item_id#24, sum#25] Keys [1]: [i_item_id#24] Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#18))] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#26, ws_bill_addr_sk#27, ws_ext_sales_price#28, ws_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -352,7 +352,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q57/explain.txt index 
629959102..78df07e8f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q57/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q57/explain.txt @@ -48,7 +48,7 @@ TakeOrderedAndProject (47) +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#1, i_brand#2, i_category#3] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: [i_item_sk#1], [cs_item_sk#5], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [cs_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.call_center +(13) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#12, cc_name#13] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q58/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q58/explain.txt index f05dbd720..cd1e112f6 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q58/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q58/explain.txt @@ -52,7 +52,7 @@ +- ReusedExchange (40) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -64,7 +64,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_item_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -88,7 +88,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], 
Inner, BuildRight Input [5]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#2))] Input [2]: [item_id#11, ss_item_rev#12] Condition : isnotnull(ss_item_rev#12) -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#13, cs_ext_sales_price#14, cs_sold_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -209,7 +209,7 @@ Arguments: [item_id#11], [item_id#21], Inner, ((((cast(ss_item_rev#12 as decimal Input [4]: [item_id#11, ss_item_rev#12, item_id#21, cs_item_rev#22] Arguments: [item_id#11, ss_item_rev#12, cs_item_rev#22], [item_id#11, ss_item_rev#12, cs_item_rev#22] -(35) Scan parquet spark_catalog.default.web_sales +(35) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#23, ws_ext_sales_price#24, ws_sold_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -298,7 +298,7 @@ BroadcastExchange (61) +- CometScan parquet spark_catalog.default.date_dim (54) -(52) Scan parquet spark_catalog.default.date_dim +(52) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -309,7 +309,7 @@ ReadSchema: struct Input [2]: [d_date_sk#7, d_date#8] Condition : isnotnull(d_date_sk#7) -(54) Scan parquet spark_catalog.default.date_dim +(54) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#9, d_week_seq#37] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -351,7 +351,7 @@ Subquery:2 Hosting operator id = 55 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#40, d_week_seq#41] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt index d194660f8..acdbfd171 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt @@ -40,7 +40,7 @@ +- CometScan parquet spark_catalog.default.date_dim (29) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -52,7 +52,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_week_seq#5, d_day_name#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -90,7 +90,7 @@ Input [9]: [d_week_seq#5, ss_store_sk#1, sum#7, sum#8, sum#9, sum#10, sum#11, su Keys [2]: [d_week_seq#5, ss_store_sk#1] 
Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))] -(11) Scan parquet spark_catalog.default.store +(11) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#14, s_store_id#15, s_store_name#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -114,7 +114,7 @@ Arguments: [ss_store_sk#1], [s_store_sk#14], Inner, BuildRight Input [12]: [d_week_seq#5, ss_store_sk#1, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_sk#14, s_store_id#15, s_store_name#16] Arguments: [d_week_seq#5, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_id#15, s_store_name#16], [d_week_seq#5, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_id#15, s_store_name#16] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_month_seq#24, d_week_seq#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -150,7 +150,7 @@ Input [9]: [d_week_seq#5, ss_store_sk#1, sum#36, sum#37, sum#38, sum#39, sum#40, Keys [2]: [d_week_seq#5, ss_store_sk#1] Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END)), sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))] -(24) Scan parquet spark_catalog.default.store +(24) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#43, s_store_id#44] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -174,7 +174,7 @@ Arguments: [ss_store_sk#1], [s_store_sk#43], Inner, BuildRight Input [11]: [d_week_seq#5, ss_store_sk#1, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_sk#43, s_store_id#44] Arguments: [d_week_seq#5, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_id#44], [d_week_seq#5, sun_sales#17, mon_sales#18, tue_sales#19, wed_sales#20, thu_sales#21, fri_sales#22, sat_sales#23, s_store_id#44] -(29) Scan parquet spark_catalog.default.date_dim +(29) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_month_seq#45, d_week_seq#46] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt index 7a8044667..e87f6ce76 100644 --- 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt @@ -39,7 +39,7 @@ +- CometScan parquet spark_catalog.default.item (21) -(1) Scan parquet spark_catalog.default.customer_address +(1) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#1, ca_state#2] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -50,7 +50,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#1, ca_state#2] Condition : isnotnull(ca_address_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#3, c_current_addr_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -74,7 +74,7 @@ Arguments: [ca_address_sk#1], [c_current_addr_sk#4], Inner, BuildRight Input [4]: [ca_address_sk#1, ca_state#2, c_customer_sk#3, c_current_addr_sk#4] Arguments: [ca_state#2, c_customer_sk#3], [ca_state#2, c_customer_sk#3] -(8) Scan parquet spark_catalog.default.store_sales +(8) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -99,7 +99,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], Inner, BuildRight Input [5]: [ca_state#2, c_customer_sk#3, ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Arguments: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7], [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -127,7 +127,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [4]: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7, d_date_sk#9] Arguments: [ca_state#2, ss_item_sk#5], [ca_state#2, ss_item_sk#5] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#13, i_current_price#14, i_category#15] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -138,7 +138,7 @@ ReadSchema: struct Input [3]: [i_item_sk#13, i_current_price#14, i_category#15] Condition : ((isnotnull(i_current_price#14) AND isnotnull(i_category#15)) AND isnotnull(i_item_sk#13)) -(21) Scan parquet spark_catalog.default.item +(21) CometScan parquet spark_catalog.default.item Output [2]: [i_current_price#16, i_category#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -228,7 +228,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(39) Scan parquet spark_catalog.default.date_dim +(39) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -260,7 +260,7 @@ Subquery:2 Hosting operator id = 40 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (44) -(44) Scan parquet spark_catalog.default.date_dim +(44) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#24, d_year#25, d_moy#26] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt index 3b5101925..0d64004d9 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt @@ -62,7 +62,7 @@ +- ReusedExchange (50) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -74,7 +74,7 @@ ReadSchema: struct Input [2]: [i_item_sk#11, i_item_id#12] Condition : isnotnull(i_item_sk#11) -(17) Scan parquet spark_catalog.default.item +(17) CometScan parquet spark_catalog.default.item Output [2]: [i_item_id#13, i_category#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -192,7 +192,7 @@ Input [2]: [i_item_id#12, sum#15] Keys [1]: [i_item_id#12] Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] -(28) Scan parquet spark_catalog.default.catalog_sales +(28) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#16, cs_item_sk#17, cs_ext_sales_price#18, cs_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -254,7 +254,7 @@ Input [2]: [i_item_id#24, sum#25] Keys [1]: [i_item_id#24] Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#18))] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#26, ws_bill_addr_sk#27, ws_ext_sales_price#28, ws_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -352,7 +352,7 @@ BroadcastExchange (66) +- CometScan parquet spark_catalog.default.date_dim (62) -(62) Scan parquet spark_catalog.default.date_dim +(62) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#7, d_moy#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt index 8fcd94392..4a157e64e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt @@ -66,7 +66,7 @@ +- ReusedExchange (56) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_promo_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -78,7 +78,7 @@ ReadSchema: struct Input [2]: [s_store_sk#1, s_store_name#2] Condition : isnotnull(s_store_sk#1) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -63,7 +63,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Condition : (isnotnull(ss_store_sk#4) AND isnotnull(ss_item_sk#3)) -(5) Scan parquet spark_catalog.default.date_dim +(5) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#8, d_month_seq#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -122,7 +122,7 @@ Arguments: [s_store_sk#1], [ss_store_sk#4], Inner, BuildRight Input [5]: [s_store_sk#1, 
s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11] Arguments: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11], [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11] -(18) Scan parquet spark_catalog.default.item +(18) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#12, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -146,7 +146,7 @@ Arguments: [ss_item_sk#3], [i_item_sk#12], Inner, BuildRight Input [9]: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#11, i_item_sk#12, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16] Arguments: [s_store_name#2, ss_store_sk#4, revenue#11, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16], [s_store_name#2, ss_store_sk#4, revenue#11, i_item_desc#13, i_current_price#14, i_wholesale_cost#15, i_brand#16] -(23) Scan parquet spark_catalog.default.store_sales +(23) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#17, ss_store_sk#18, ss_sales_price#19, ss_sold_date_sk#20] Batched: true Location: InMemoryFileIndex [] @@ -232,7 +232,7 @@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(40) Scan parquet spark_catalog.default.date_dim +(40) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#8, d_month_seq#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt index c8cdc8094..693eb5662 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt @@ -51,7 +51,7 @@ +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_warehouse_sk#3, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, ws_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -63,7 +63,7 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#10] Join type: LeftAnti Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#14, cs_sold_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -171,7 +171,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#2, c_current_addr_sk#3] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(28) Scan parquet spark_catalog.default.customer_address +(28) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#18, ca_state#19] Batched: true Location 
[not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [1]: [c_current_cdemo_sk#2] Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#18] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_education_status#23, cd_purchase_estimate#24, cd_credit_rating#25] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt index 1f5e1338e..d18a60b0b 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt @@ -31,7 +31,7 @@ +- CometScan parquet spark_catalog.default.promotion (20) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -43,7 +43,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -88,7 +88,7 @@ Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] (9) ColumnarToRow [codegen id : 4] Input [2]: [ss_store_sk#1, ss_net_profit#2] -(10) Scan parquet spark_catalog.default.store +(10) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#7, s_county#8, s_state#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -102,7 +102,7 @@ Condition : isnotnull(s_store_sk#7) (12) ColumnarToRow [codegen id : 3] Input [3]: [s_store_sk#7, s_county#8, s_state#9] -(13) Scan parquet spark_catalog.default.store_sales +(13) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -114,7 +114,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Condition : isnotnull(ss_store_sk#10) -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#14, s_state#15] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -259,7 +259,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt index 65ffab7e8..79a71af03 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt @@ -38,7 +38,7 @@ +- CometScan parquet spark_catalog.default.time_dim (26) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#1, i_brand_id#2, i_brand#3, i_manager_id#4] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -57,7 +57,7 @@ Arguments: [i_item_sk#1, i_brand_id#2, i_brand#3], [i_item_sk#1, i_brand_id#2, i Input [3]: [i_item_sk#1, i_brand_id#2, i_brand#3] Arguments: [i_item_sk#1, i_brand_id#2, i_brand#3] -(5) Scan parquet spark_catalog.default.web_sales +(5) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_sold_time_sk#5, ws_item_sk#6, ws_ext_sales_price#7, ws_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -69,7 +69,7 @@ ReadSchema: struct date_add(d_d Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#28, d_date#29] Arguments: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] -(46) Scan parquet spark_catalog.default.promotion +(46) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#30] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -314,7 +314,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Arguments: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST] -(53) Scan parquet spark_catalog.default.catalog_returns +(53) CometScan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#31, cr_order_number#32, cr_returned_date_sk#33] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -377,7 +377,7 @@ BroadcastExchange (69) +- CometScan parquet spark_catalog.default.date_dim (65) -(65) Scan parquet spark_catalog.default.date_dim +(65) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt index 7530f5aa5..e4d1ff5a2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -45,7 +45,7 @@ ReadSchema: struct= 1) AND (cnt#17 <= 5)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer 
Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt index b23b0b48c..c7a559c8e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt @@ -69,7 +69,7 @@ +- ReusedExchange (58) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -80,7 +80,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#14, c_customer_id#15, c_first_name#16, c_last_name#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -158,7 +158,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -83,7 +83,7 @@ Arguments: [ss_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3, d_date_sk#5] Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] -(9) Scan parquet spark_catalog.default.store +(9) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#8, s_store_name#9, s_zip#10] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -107,7 +107,7 @@ Arguments: [ss_store_sk#1], [s_store_sk#8], Inner, BuildRight Input [5]: [ss_store_sk#1, ss_net_profit#2, s_store_sk#8, s_store_name#9, s_zip#10] Arguments: [ss_net_profit#2, s_store_name#9, s_zip#10], [ss_net_profit#2, s_store_name#9, s_zip#10] -(14) Scan parquet spark_catalog.default.customer_address +(14) CometScan parquet spark_catalog.default.customer_address Output [1]: [ca_zip#11] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -121,7 +121,7 @@ Condition : (substr(ca_zip#11, 1, 5) INSET 10144, 10336, 10390, 10445, 10516, 10 Input [1]: [ca_zip#11] Arguments: [ca_zip#12], [substr(ca_zip#11, 1, 5) AS ca_zip#12] -(17) Scan parquet spark_catalog.default.customer_address +(17) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#13, ca_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -132,7 +132,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#13, ca_zip#14] Condition : isnotnull(ca_address_sk#13) -(19) Scan parquet spark_catalog.default.customer +(19) CometScan parquet spark_catalog.default.customer Output [2]: [c_current_addr_sk#15, 
c_preferred_cust_flag#16] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -249,7 +249,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(43) Scan parquet spark_catalog.default.date_dim +(43) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt index 962e04f07..65fe1a28b 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt @@ -103,7 +103,7 @@ TakeOrderedAndProject (102) +- ReusedExchange (90) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -123,7 +123,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#4, 5), ENSURE_REQUIRE Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7], [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#4 ASC NULLS FIRST] -(5) Scan parquet spark_catalog.default.store_returns +(5) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12, sr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -155,7 +155,7 @@ Arguments: [ss_item_sk#1, ss_ticket_number#4], [sr_item_sk#9, sr_ticket_number#1 Input [11]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12], [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12] -(12) Scan parquet spark_catalog.default.date_dim +(12) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -183,7 +183,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#14], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12, d_date_sk#14] Arguments: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12], [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12] -(18) Scan parquet spark_catalog.default.store +(18) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#16, s_store_id#17] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -207,7 +207,7 @@ Arguments: [ss_store_sk#2], [s_store_sk#16], Inner, BuildRight Input [9]: [ss_item_sk#1, 
ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_sk#16, s_store_id#17] Arguments: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17], [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17] -(23) Scan parquet spark_catalog.default.item +(23) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#18, i_current_price#19] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -235,7 +235,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#18], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17, i_item_sk#18] Arguments: [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17], [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#17] -(29) Scan parquet spark_catalog.default.promotion +(29) CometScan parquet spark_catalog.default.promotion Output [2]: [p_promo_sk#20, p_channel_tv#21] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -282,7 +282,7 @@ Functions [3]: [sum(UnscaledValue(ss_ext_sales_price#5)), sum(coalesce(cast(sr_r Aggregate Attributes [3]: [sum(UnscaledValue(ss_ext_sales_price#5))#27, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#28, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#29] Results [5]: [MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#5))#27,17,2) AS sales#30, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#28 AS returns#31, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#29 AS profit#32, store channel AS channel#33, concat(store, s_store_id#17) AS id#34] -(39) Scan parquet spark_catalog.default.catalog_sales +(39) CometScan parquet spark_catalog.default.catalog_sales Output [7]: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_order_number#38, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41] Batched: true Location: InMemoryFileIndex [] @@ -302,7 +302,7 @@ Arguments: hashpartitioning(cs_item_sk#36, cs_order_number#38, 5), ENSURE_REQUIR Input [7]: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_order_number#38, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41] Arguments: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_order_number#38, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41], [cs_item_sk#36 ASC NULLS FIRST, cs_order_number#38 ASC NULLS FIRST] -(43) Scan parquet spark_catalog.default.catalog_returns +(43) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#43, cr_order_number#44, cr_return_amount#45, cr_net_loss#46, cr_returned_date_sk#47] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -346,7 +346,7 @@ Arguments: [cs_sold_date_sk#41], [d_date_sk#48], Inner, BuildRight Input [9]: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_ext_sales_price#39, cs_net_profit#40, cs_sold_date_sk#41, cr_return_amount#45, cr_net_loss#46, d_date_sk#48] Arguments: [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_ext_sales_price#39, cs_net_profit#40, cr_return_amount#45, cr_net_loss#46], [cs_catalog_page_sk#35, cs_item_sk#36, cs_promo_sk#37, cs_ext_sales_price#39, cs_net_profit#40, cr_return_amount#45, cr_net_loss#46] 
-(53) Scan parquet spark_catalog.default.catalog_page +(53) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#49, cp_catalog_page_id#50] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -413,7 +413,7 @@ Functions [3]: [sum(UnscaledValue(cs_ext_sales_price#39)), sum(coalesce(cast(cr_ Aggregate Attributes [3]: [sum(UnscaledValue(cs_ext_sales_price#39))#58, sum(coalesce(cast(cr_return_amount#45 as decimal(12,2)), 0.00))#59, sum((cs_net_profit#40 - coalesce(cast(cr_net_loss#46 as decimal(12,2)), 0.00)))#60] Results [5]: [MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#39))#58,17,2) AS sales#61, sum(coalesce(cast(cr_return_amount#45 as decimal(12,2)), 0.00))#59 AS returns#62, sum((cs_net_profit#40 - coalesce(cast(cr_net_loss#46 as decimal(12,2)), 0.00)))#60 AS profit#63, catalog channel AS channel#64, concat(catalog_page, cp_catalog_page_id#50) AS id#65] -(68) Scan parquet spark_catalog.default.web_sales +(68) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_order_number#69, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72] Batched: true Location: InMemoryFileIndex [] @@ -433,7 +433,7 @@ Arguments: hashpartitioning(ws_item_sk#66, ws_order_number#69, 5), ENSURE_REQUIR Input [7]: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_order_number#69, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72] Arguments: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_order_number#69, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72], [ws_item_sk#66 ASC NULLS FIRST, ws_order_number#69 ASC NULLS FIRST] -(72) Scan parquet spark_catalog.default.web_returns +(72) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#74, wr_order_number#75, wr_return_amt#76, wr_net_loss#77, wr_returned_date_sk#78] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -477,7 +477,7 @@ Arguments: [ws_sold_date_sk#72], [d_date_sk#79], Inner, BuildRight Input [9]: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_ext_sales_price#70, ws_net_profit#71, ws_sold_date_sk#72, wr_return_amt#76, wr_net_loss#77, d_date_sk#79] Arguments: [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_ext_sales_price#70, ws_net_profit#71, wr_return_amt#76, wr_net_loss#77], [ws_item_sk#66, ws_web_site_sk#67, ws_promo_sk#68, ws_ext_sales_price#70, ws_net_profit#71, wr_return_amt#76, wr_net_loss#77] -(82) Scan parquet spark_catalog.default.web_site +(82) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#80, web_site_id#81] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] @@ -582,7 +582,7 @@ BroadcastExchange (107) +- CometScan parquet spark_catalog.default.date_dim (103) -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt index 8be7d0b3e..8ae0dae9a 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt @@ -48,7 +48,7 @@ +- CometScan parquet spark_catalog.default.customer_address (41) -(1) Scan parquet 
spark_catalog.default.catalog_returns +(1) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_returning_customer_sk#1, cr_returning_addr_sk#2, cr_return_amt_inc_tax#3, cr_returned_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ ReadSchema: struct= 62.00)) A Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(4) Scan parquet spark_catalog.default.inventory +(4) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -71,7 +71,7 @@ Arguments: [i_item_sk#1], [inv_item_sk#6], Inner, BuildRight Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_item_sk#6, inv_date_sk#8] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8], [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date_sk#8] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(17) Scan parquet spark_catalog.default.store_sales +(17) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#12, ss_sold_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -158,7 +158,7 @@ BroadcastExchange (31) +- CometScan parquet spark_catalog.default.date_dim (27) -(27) Scan parquet spark_catalog.default.date_dim +(27) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt index ffd8a5eb1..a84354c40 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt @@ -49,7 +49,7 @@ +- ReusedExchange (38) -(1) Scan parquet spark_catalog.default.store_returns +(1) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -61,7 +61,7 @@ ReadSchema: struct Input [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] Condition : isnotnull(sr_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_item_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -85,7 +85,7 @@ Arguments: [sr_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [5]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3, i_item_sk#5, i_item_id#6] Arguments: [sr_return_quantity#2, sr_returned_date_sk#3, i_item_id#6], [sr_return_quantity#2, sr_returned_date_sk#3, i_item_id#6] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: 
[d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -135,7 +135,7 @@ Input [2]: [i_item_id#6, sum#10] Keys [1]: [i_item_id#6] Functions [1]: [sum(sr_return_quantity#2)] -(19) Scan parquet spark_catalog.default.catalog_returns +(19) CometScan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#11, cr_return_quantity#12, cr_returned_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -198,7 +198,7 @@ Arguments: [item_id#21], [item_id#19], Inner, BuildRight Input [4]: [item_id#21, sr_item_qty#22, item_id#19, cr_item_qty#20] Arguments: [item_id#21, sr_item_qty#22, cr_item_qty#20], [item_id#21, sr_item_qty#22, cr_item_qty#20] -(33) Scan parquet spark_catalog.default.web_returns +(33) CometScan parquet spark_catalog.default.web_returns Output [3]: [wr_item_sk#23, wr_return_quantity#24, wr_returned_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -287,7 +287,7 @@ BroadcastExchange (62) +- CometScan parquet spark_catalog.default.date_dim (52) -(49) Scan parquet spark_catalog.default.date_dim +(49) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#8] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -298,13 +298,13 @@ ReadSchema: struct Input [2]: [d_date_sk#7, d_date#8] Condition : isnotnull(d_date_sk#7) -(51) Scan parquet spark_catalog.default.date_dim +(51) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#9, d_week_seq#37] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] ReadSchema: struct -(52) Scan parquet spark_catalog.default.date_dim +(52) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date#38, d_week_seq#39] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt index 468af40d2..572fd7a66 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt @@ -33,7 +33,7 @@ TakeOrderedAndProject (32) +- CometScan parquet spark_catalog.default.store_returns (26) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [6]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#5, c_last_name#6] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -44,7 +44,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -64,7 +64,7 @@ Arguments: [ws_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5] Arguments: [ws_item_sk#1, ws_net_paid#2], [ws_item_sk#1, ws_net_paid#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#7, i_class#8, i_category#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -139,7 +139,7 @@ BroadcastExchange (28) +- CometScan parquet spark_catalog.default.date_dim (24) -(24) Scan 
parquet spark_catalog.default.date_dim +(24) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt index d023b5b31..28b942e2f 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt @@ -50,7 +50,7 @@ +- ReusedExchange (37) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Batched: true Location: InMemoryFileIndex [] @@ -62,7 +62,7 @@ ReadSchema: struct Input [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Condition : isnotnull(ss_customer_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -90,7 +90,7 @@ Arguments: [ss_sold_date_sk#2], [d_date_sk#4], Inner, BuildRight Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5] Arguments: [ss_customer_sk#1, d_date#5], [ss_customer_sk#1, d_date#5] -(9) Scan parquet spark_catalog.default.customer +(9) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -131,7 +131,7 @@ Functions: [] (17) ColumnarToRow [codegen id : 3] Input [3]: [c_last_name#9, c_first_name#8, d_date#5] -(18) Scan parquet spark_catalog.default.catalog_sales +(18) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_bill_customer_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -194,7 +194,7 @@ Right keys [6]: [coalesce(c_last_name#17, ), isnull(c_last_name#17), coalesce(c_ Join type: LeftAnti Join condition: None -(32) Scan parquet spark_catalog.default.web_sales +(32) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#18, ws_sold_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -289,7 +289,7 @@ BroadcastExchange (54) +- CometScan parquet spark_catalog.default.date_dim (50) -(50) Scan parquet spark_catalog.default.date_dim +(50) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt index 1dface7ea..592e23cd2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt @@ -173,7 +173,7 @@ +- ReusedExchange (164) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -188,7 +188,7 @@ Condition : ((isnotnull(ss_hdemo_sk#2) AND isnotnull(ss_sold_time_sk#1)) AND isn Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, 
ss_store_sk#3, ss_sold_date_sk#4] Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3] -(4) Scan parquet spark_catalog.default.household_demographics +(4) CometScan parquet spark_catalog.default.household_demographics Output [3]: [hd_demo_sk#5, hd_dep_count#6, hd_vehicle_count#7] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -216,7 +216,7 @@ Arguments: [ss_hdemo_sk#2], [hd_demo_sk#5], Inner, BuildRight Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5] Arguments: [ss_sold_time_sk#1, ss_store_sk#3], [ss_sold_time_sk#1, ss_store_sk#3] -(10) Scan parquet spark_catalog.default.time_dim +(10) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#8, t_hour#9, t_minute#10] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -244,7 +244,7 @@ Arguments: [ss_sold_time_sk#1], [t_time_sk#8], Inner, BuildRight Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#8] Arguments: [ss_store_sk#3], [ss_store_sk#3] -(16) Scan parquet spark_catalog.default.store +(16) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#11, s_store_name#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -288,7 +288,7 @@ Functions [1]: [count(1)] (25) ColumnarToRow [codegen id : 8] Input [1]: [h8_30_to_9#14] -(26) Scan parquet spark_catalog.default.store_sales +(26) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#15, ss_hdemo_sk#16, ss_store_sk#17, ss_sold_date_sk#18] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -315,7 +315,7 @@ Arguments: [ss_hdemo_sk#16], [hd_demo_sk#19], Inner, BuildRight Input [4]: [ss_sold_time_sk#15, ss_hdemo_sk#16, ss_store_sk#17, hd_demo_sk#19] Arguments: [ss_sold_time_sk#15, ss_store_sk#17], [ss_sold_time_sk#15, ss_store_sk#17] -(32) Scan parquet spark_catalog.default.time_dim +(32) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#20, t_hour#21, t_minute#22] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -379,7 +379,7 @@ Arguments: IdentityBroadcastMode, [plan_id=3] Join type: Inner Join condition: None -(47) Scan parquet spark_catalog.default.store_sales +(47) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#26, ss_hdemo_sk#27, ss_store_sk#28, ss_sold_date_sk#29] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -406,7 +406,7 @@ Arguments: [ss_hdemo_sk#27], [hd_demo_sk#30], Inner, BuildRight Input [4]: [ss_sold_time_sk#26, ss_hdemo_sk#27, ss_store_sk#28, hd_demo_sk#30] Arguments: [ss_sold_time_sk#26, ss_store_sk#28], [ss_sold_time_sk#26, ss_store_sk#28] -(53) Scan parquet spark_catalog.default.time_dim +(53) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#31, t_hour#32, t_minute#33] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -470,7 +470,7 @@ Arguments: IdentityBroadcastMode, [plan_id=5] Join type: Inner Join condition: None -(68) Scan parquet spark_catalog.default.store_sales +(68) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#37, ss_hdemo_sk#38, ss_store_sk#39, ss_sold_date_sk#40] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -497,7 +497,7 @@ Arguments: [ss_hdemo_sk#38], [hd_demo_sk#41], Inner, BuildRight Input [4]: [ss_sold_time_sk#37, 
ss_hdemo_sk#38, ss_store_sk#39, hd_demo_sk#41] Arguments: [ss_sold_time_sk#37, ss_store_sk#39], [ss_sold_time_sk#37, ss_store_sk#39] -(74) Scan parquet spark_catalog.default.time_dim +(74) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#42, t_hour#43, t_minute#44] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -561,7 +561,7 @@ Arguments: IdentityBroadcastMode, [plan_id=7] Join type: Inner Join condition: None -(89) Scan parquet spark_catalog.default.store_sales +(89) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#48, ss_hdemo_sk#49, ss_store_sk#50, ss_sold_date_sk#51] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -588,7 +588,7 @@ Arguments: [ss_hdemo_sk#49], [hd_demo_sk#52], Inner, BuildRight Input [4]: [ss_sold_time_sk#48, ss_hdemo_sk#49, ss_store_sk#50, hd_demo_sk#52] Arguments: [ss_sold_time_sk#48, ss_store_sk#50], [ss_sold_time_sk#48, ss_store_sk#50] -(95) Scan parquet spark_catalog.default.time_dim +(95) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#53, t_hour#54, t_minute#55] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -652,7 +652,7 @@ Arguments: IdentityBroadcastMode, [plan_id=9] Join type: Inner Join condition: None -(110) Scan parquet spark_catalog.default.store_sales +(110) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#59, ss_hdemo_sk#60, ss_store_sk#61, ss_sold_date_sk#62] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -679,7 +679,7 @@ Arguments: [ss_hdemo_sk#60], [hd_demo_sk#63], Inner, BuildRight Input [4]: [ss_sold_time_sk#59, ss_hdemo_sk#60, ss_store_sk#61, hd_demo_sk#63] Arguments: [ss_sold_time_sk#59, ss_store_sk#61], [ss_sold_time_sk#59, ss_store_sk#61] -(116) Scan parquet spark_catalog.default.time_dim +(116) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#64, t_hour#65, t_minute#66] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -743,7 +743,7 @@ Arguments: IdentityBroadcastMode, [plan_id=11] Join type: Inner Join condition: None -(131) Scan parquet spark_catalog.default.store_sales +(131) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#70, ss_hdemo_sk#71, ss_store_sk#72, ss_sold_date_sk#73] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -770,7 +770,7 @@ Arguments: [ss_hdemo_sk#71], [hd_demo_sk#74], Inner, BuildRight Input [4]: [ss_sold_time_sk#70, ss_hdemo_sk#71, ss_store_sk#72, hd_demo_sk#74] Arguments: [ss_sold_time_sk#70, ss_store_sk#72], [ss_sold_time_sk#70, ss_store_sk#72] -(137) Scan parquet spark_catalog.default.time_dim +(137) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#75, t_hour#76, t_minute#77] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -834,7 +834,7 @@ Arguments: IdentityBroadcastMode, [plan_id=13] Join type: Inner Join condition: None -(152) Scan parquet spark_catalog.default.store_sales +(152) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#81, ss_hdemo_sk#82, ss_store_sk#83, ss_sold_date_sk#84] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -861,7 +861,7 @@ Arguments: [ss_hdemo_sk#82], [hd_demo_sk#85], Inner, BuildRight Input [4]: [ss_sold_time_sk#81, ss_hdemo_sk#82, ss_store_sk#83, hd_demo_sk#85] Arguments: [ss_sold_time_sk#81, 
ss_store_sk#83], [ss_sold_time_sk#81, ss_store_sk#83] -(158) Scan parquet spark_catalog.default.time_dim +(158) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#86, t_hour#87, t_minute#88] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt index 6789e5b18..143ff0868 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt @@ -29,7 +29,7 @@ TakeOrderedAndProject (28) +- CometScan parquet spark_catalog.default.store (14) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -40,7 +40,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3] Condition : (isnotnull(ws_item_sk#1) AND isnotnull(ws_ext_discount_amt#2)) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_manufact_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -71,7 +71,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5] Arguments: [ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5], [ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5] -(9) Scan parquet spark_catalog.default.web_sales +(9) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#7, ws_ext_discount_amt#8, ws_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -83,7 +83,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#7, ws_ext_discount_amt#8, ws_sold_date_sk#9] Condition : isnotnull(ws_item_sk#7) -(11) Scan parquet spark_catalog.default.date_dim +(11) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -181,7 +181,7 @@ BroadcastExchange (35) +- CometScan parquet spark_catalog.default.date_dim (31) -(31) Scan parquet spark_catalog.default.date_dim +(31) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_date#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt index 4c672e397..dc64f3c4c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt @@ -23,7 +23,7 @@ +- CometScan parquet spark_catalog.default.reason (12) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -41,7 +41,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#3, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5] 
Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5], [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#3 ASC NULLS FIRST] -(5) Scan parquet spark_catalog.default.store_returns +(5) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10, sr_returned_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -73,7 +73,7 @@ Arguments: [ss_item_sk#1, ss_ticket_number#3], [sr_item_sk#7, sr_ticket_number#9 Input [9]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10] Arguments: [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, sr_return_quantity#10], [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, sr_return_quantity#10] -(12) Scan parquet spark_catalog.default.reason +(12) CometScan parquet spark_catalog.default.reason Output [2]: [r_reason_sk#12, r_reason_desc#13] Batched: true Location [not included in comparison]/{warehouse_dir}/reason] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt index 8f23b9979..4dd9246cd 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt @@ -41,7 +41,7 @@ +- CometScan parquet spark_catalog.default.web_site (29) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [8]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ws_sold_date_sk#8] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -64,7 +64,7 @@ Arguments: hashpartitioning(ws_order_number#5, 5), ENSURE_REQUIREMENTS, CometNat Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_order_number#5 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.web_sales +(6) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_warehouse_sk#9, ws_order_number#10, ws_sold_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -91,7 +91,7 @@ Arguments: [ws_order_number#5], [ws_order_number#10], LeftSemi, NOT (ws_warehous Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(12) Scan parquet spark_catalog.default.web_returns +(12) CometScan parquet spark_catalog.default.web_returns Output [2]: [wr_order_number#12, wr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -114,7 +114,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [wr_order_number#12] Arguments: [ws_order_number#5], 
[wr_order_number#12], LeftAnti -(17) Scan parquet spark_catalog.default.date_dim +(17) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -142,7 +142,7 @@ Arguments: [ws_ship_date_sk#1], [d_date_sk#14], Inner, BuildRight Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, d_date_sk#14] Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(23) Scan parquet spark_catalog.default.customer_address +(23) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#16, ca_state#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -170,7 +170,7 @@ Arguments: [ws_ship_addr_sk#2], [ca_address_sk#16], Inner, BuildRight Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ca_address_sk#16] Arguments: [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7], [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7] -(29) Scan parquet spark_catalog.default.web_site +(29) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#18, web_company_name#19] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt index 920e23faf..2e951a408 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt @@ -54,7 +54,7 @@ +- CometScan parquet spark_catalog.default.web_site (42) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ws_sold_date_sk#7] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -77,7 +77,7 @@ Arguments: hashpartitioning(ws_order_number#4, 5), ENSURE_REQUIREMENTS, CometNat Input [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] Arguments: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_order_number#4 ASC NULLS FIRST] -(6) Scan parquet spark_catalog.default.web_sales +(6) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_warehouse_sk#8, ws_order_number#9, ws_sold_date_sk#10] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -121,7 +121,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: [ws_order_number#9] Arguments: [ws_order_number#4], [ws_order_number#9], LeftSemi -(16) Scan parquet spark_catalog.default.web_returns +(16) CometScan parquet spark_catalog.default.web_returns Output [2]: [wr_order_number#13, wr_returned_date_sk#14] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -181,7 +181,7 @@ Left output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_ord Right output [1]: 
[wr_order_number#13] Arguments: [ws_order_number#4], [wr_order_number#13], LeftSemi -(30) Scan parquet spark_catalog.default.date_dim +(30) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#15, d_date#16] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -209,7 +209,7 @@ Arguments: [ws_ship_date_sk#1], [d_date_sk#15], Inner, BuildRight Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, d_date_sk#15] Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] -(36) Scan parquet spark_catalog.default.customer_address +(36) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#17, ca_state#18] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -237,7 +237,7 @@ Arguments: [ws_ship_addr_sk#2], [ca_address_sk#17], Inner, BuildRight Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ca_address_sk#17] Arguments: [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6], [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6] -(42) Scan parquet spark_catalog.default.web_site +(42) CometScan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#19, web_company_name#20] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt index c4b2cf973..d2e63bee2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt @@ -26,7 +26,7 @@ +- CometScan parquet spark_catalog.default.store (16) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -41,7 +41,7 @@ Condition : ((isnotnull(ss_hdemo_sk#2) AND isnotnull(ss_sold_time_sk#1)) AND isn Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4] Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3] -(4) Scan parquet spark_catalog.default.household_demographics +(4) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#5, hd_dep_count#6] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -69,7 +69,7 @@ Arguments: [ss_hdemo_sk#2], [hd_demo_sk#5], Inner, BuildRight Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5] Arguments: [ss_sold_time_sk#1, ss_store_sk#3], [ss_sold_time_sk#1, ss_store_sk#3] -(10) Scan parquet spark_catalog.default.time_dim +(10) CometScan parquet spark_catalog.default.time_dim Output [3]: [t_time_sk#7, t_hour#8, t_minute#9] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -97,7 +97,7 @@ Arguments: [ss_sold_time_sk#1], [t_time_sk#7], Inner, BuildRight Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#7] Arguments: [ss_store_sk#3], [ss_store_sk#3] -(16) Scan parquet 
spark_catalog.default.store +(16) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#10, s_store_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/store] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt index 2eec0079d..1c6e9b78c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt @@ -26,14 +26,14 @@ +- ReusedExchange (13) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_customer_sk#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] PartitionFilters: [isnotnull(ss_sold_date_sk#3), dynamicpruningexpression(ss_sold_date_sk#3 IN dynamicpruning#4)] ReadSchema: struct -(2) Scan parquet spark_catalog.default.date_dim +(2) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -79,7 +79,7 @@ Functions: [] Input [2]: [customer_sk#7, item_sk#8] Arguments: [customer_sk#7, item_sk#8], [customer_sk#7 ASC NULLS FIRST, item_sk#8 ASC NULLS FIRST] -(12) Scan parquet spark_catalog.default.catalog_sales +(12) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_bill_customer_sk#9, cs_item_sk#10, cs_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -152,7 +152,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(26) Scan parquet spark_catalog.default.date_dim +(26) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt index 0a9810928..593065ba0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt @@ -26,7 +26,7 @@ +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -38,7 +38,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -62,7 +62,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) 
CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -148,7 +148,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(26) Scan parquet spark_catalog.default.date_dim +(26) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt index 7935bb4c6..6dfcf8b32 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt @@ -29,7 +29,7 @@ +- CometScan parquet spark_catalog.default.date_dim (18) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [5]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_warehouse_sk#4, cs_sold_date_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -40,7 +40,7 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -95,7 +95,7 @@ Left output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Right output [1]: [ss_customer_sk#4] Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight -(12) Scan parquet spark_catalog.default.web_sales +(12) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -114,7 +114,7 @@ Arguments: [ws_sold_date_sk#11], [d_date_sk#13], Inner, BuildRight Input [3]: [ws_bill_customer_sk#10, ws_sold_date_sk#11, d_date_sk#13] Arguments: [customer_sk#14], [ws_bill_customer_sk#10 AS customer_sk#14] -(16) Scan parquet spark_catalog.default.catalog_sales +(16) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#15, cs_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -150,7 +150,7 @@ Arguments: [c_customer_sk#1], [customer_sk#14], LeftSemi, BuildRight Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Arguments: [c_current_cdemo_sk#2, c_current_addr_sk#3], [c_current_cdemo_sk#2, c_current_addr_sk#3] -(24) Scan parquet spark_catalog.default.customer_address +(24) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_county#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -178,7 +178,7 @@ Arguments: [c_current_addr_sk#3], [ca_address_sk#20], Inner, BuildRight Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#20] Arguments: [c_current_cdemo_sk#2], [c_current_cdemo_sk#2] -(30) Scan parquet spark_catalog.default.customer_demographics +(30) CometScan parquet spark_catalog.default.customer_demographics Output [9]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_education_status#25, cd_purchase_estimate#26, cd_credit_rating#27, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -233,7 +233,7 
@@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(40) Scan parquet spark_catalog.default.date_dim +(40) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q11/explain.txt index 52d523ca7..689697306 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q11/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q11/explain.txt @@ -69,7 +69,7 @@ +- ReusedExchange (58) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -80,7 +80,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#19, c_customer_id#20, c_first_name#21, c_last_name#22, c_preferred_cust_flag#23, c_birth_country#24, c_login#25, c_email_address#26] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -158,7 +158,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14/explain.txt index c30e67082..130726664 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14/explain.txt @@ -86,7 +86,7 @@ +- CometScan parquet spark_catalog.default.date_dim (72) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: 
[ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -109,7 +109,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -132,7 +132,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -168,7 +168,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -244,7 +244,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -311,7 +311,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -343,7 +343,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -389,7 +389,7 @@ Functions [2]: [sum((cast(ss_quantity#2 as decimal(10,0)) * ss_list_price#3)), c Input [6]: [channel#50, i_brand_id#40, i_class_id#41, i_category_id#42, sales#51, number_sales#52] Condition : (isnotnull(sales#51) AND (cast(sales#51 as decimal(32,6)) > cast(Subquery scalar-subquery#53, 
[id=#54] as decimal(32,6)))) -(65) Scan parquet spark_catalog.default.store_sales +(65) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -421,7 +421,7 @@ Arguments: [ss_item_sk#55], [i_item_sk#61], Inner, BuildRight Input [8]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_item_sk#61, i_brand_id#62, i_class_id#63, i_category_id#64] Arguments: [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#62, i_class_id#63, i_category_id#64], [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#62, i_class_id#63, i_category_id#64] -(72) Scan parquet spark_catalog.default.date_dim +(72) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#65, d_week_seq#66] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -505,7 +505,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (95) -(86) Scan parquet spark_catalog.default.store_sales +(86) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#75, ss_list_price#76, ss_sold_date_sk#77] Batched: true Location: InMemoryFileIndex [] @@ -524,7 +524,7 @@ Arguments: [ss_sold_date_sk#77], [d_date_sk#79], Inner, BuildRight Input [4]: [ss_quantity#75, ss_list_price#76, ss_sold_date_sk#77, d_date_sk#79] Arguments: [quantity#80, list_price#81], [ss_quantity#75 AS quantity#80, ss_list_price#76 AS list_price#81] -(90) Scan parquet spark_catalog.default.catalog_sales +(90) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#82, cs_list_price#83, cs_sold_date_sk#84] Batched: true Location: InMemoryFileIndex [] @@ -543,7 +543,7 @@ Arguments: [cs_sold_date_sk#84], [d_date_sk#86], Inner, BuildRight Input [4]: [cs_quantity#82, cs_list_price#83, cs_sold_date_sk#84, d_date_sk#86] Arguments: [quantity#87, list_price#88], [cs_quantity#82 AS quantity#87, cs_list_price#83 AS list_price#88] -(94) Scan parquet spark_catalog.default.web_sales +(94) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#89, ws_list_price#90, ws_sold_date_sk#91] Batched: true Location: InMemoryFileIndex [] @@ -598,7 +598,7 @@ BroadcastExchange (107) +- CometScan parquet spark_catalog.default.date_dim (103) -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -627,7 +627,7 @@ Subquery:6 Hosting operator id = 104 Hosting Expression = Subquery scalar-subque +- CometScan parquet spark_catalog.default.date_dim (108) -(108) Scan parquet spark_catalog.default.date_dim +(108) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#99, d_year#100, d_moy#101, d_dom#102] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -653,7 +653,7 @@ BroadcastExchange (116) +- CometScan parquet spark_catalog.default.date_dim (112) -(112) Scan parquet spark_catalog.default.date_dim +(112) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#103] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -691,7 +691,7 @@ BroadcastExchange (121) +- CometScan parquet spark_catalog.default.date_dim (117) -(117) Scan parquet spark_catalog.default.date_dim +(117) CometScan parquet spark_catalog.default.date_dim Output 
[2]: [d_date_sk#65, d_week_seq#66] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -720,7 +720,7 @@ Subquery:13 Hosting operator id = 118 Hosting Expression = Subquery scalar-subqu +- CometScan parquet spark_catalog.default.date_dim (122) -(122) Scan parquet spark_catalog.default.date_dim +(122) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#104, d_year#105, d_moy#106, d_dom#107] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14a/explain.txt index 6d048ce94..c5c211461 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q14a/explain.txt @@ -123,7 +123,7 @@ +- ReusedExchange (112) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -146,7 +146,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -169,7 +169,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -205,7 +205,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -281,7 +281,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -348,7 +348,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item 
Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -380,7 +380,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -426,7 +426,7 @@ Functions [2]: [sum((cast(ss_quantity#2 as decimal(10,0)) * ss_list_price#3)), c Input [6]: [channel#49, i_brand_id#40, i_class_id#41, i_category_id#42, sales#50, number_sales#51] Condition : (isnotnull(sales#50) AND (cast(sales#50 as decimal(32,6)) > cast(Subquery scalar-subquery#52, [id=#53] as decimal(32,6)))) -(65) Scan parquet spark_catalog.default.catalog_sales +(65) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#54, cs_quantity#55, cs_list_price#56, cs_sold_date_sk#57] Batched: true Location: InMemoryFileIndex [] @@ -488,7 +488,7 @@ Functions [2]: [sum((cast(cs_quantity#55 as decimal(10,0)) * cs_list_price#56)), Input [6]: [channel#68, i_brand_id#61, i_class_id#62, i_category_id#63, sales#69, number_sales#70] Condition : (isnotnull(sales#69) AND (cast(sales#69 as decimal(32,6)) > cast(ReusedSubquery Subquery scalar-subquery#52, [id=#53] as decimal(32,6)))) -(79) Scan parquet spark_catalog.default.web_sales +(79) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#71, ws_quantity#72, ws_list_price#73, ws_sold_date_sk#74] Batched: true Location: InMemoryFileIndex [] @@ -710,7 +710,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (135) -(123) Scan parquet spark_catalog.default.store_sales +(123) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#143, ss_list_price#144, ss_sold_date_sk#145] Batched: true Location: InMemoryFileIndex [] @@ -729,14 +729,14 @@ Arguments: [ss_sold_date_sk#145], [d_date_sk#147], Inner, BuildRight Input [4]: [ss_quantity#143, ss_list_price#144, ss_sold_date_sk#145, d_date_sk#147] Arguments: [quantity#148, list_price#149], [ss_quantity#143 AS quantity#148, ss_list_price#144 AS list_price#149] -(127) Scan parquet spark_catalog.default.catalog_sales +(127) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#150, cs_list_price#151, cs_sold_date_sk#152] Batched: true Location: InMemoryFileIndex [] PartitionFilters: [isnotnull(cs_sold_date_sk#152), dynamicpruningexpression(cs_sold_date_sk#152 IN dynamicpruning#153)] ReadSchema: struct -(128) Scan parquet spark_catalog.default.date_dim +(128) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#154, d_year#155] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -764,7 +764,7 @@ Arguments: [cs_sold_date_sk#152], [d_date_sk#154], Inner, BuildRight Input [4]: [cs_quantity#150, cs_list_price#151, cs_sold_date_sk#152, d_date_sk#154] Arguments: [quantity#156, list_price#157], [cs_quantity#150 AS quantity#156, cs_list_price#151 AS list_price#157] -(134) Scan parquet spark_catalog.default.web_sales 
+(134) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#158, ws_list_price#159, ws_sold_date_sk#160] Batched: true Location: InMemoryFileIndex [] @@ -815,7 +815,7 @@ BroadcastExchange (147) +- CometScan parquet spark_catalog.default.date_dim (143) -(143) Scan parquet spark_catalog.default.date_dim +(143) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#154, d_year#155] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -847,7 +847,7 @@ BroadcastExchange (152) +- CometScan parquet spark_catalog.default.date_dim (148) -(148) Scan parquet spark_catalog.default.date_dim +(148) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -877,7 +877,7 @@ BroadcastExchange (157) +- CometScan parquet spark_catalog.default.date_dim (153) -(153) Scan parquet spark_catalog.default.date_dim +(153) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#168] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q18a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q18a/explain.txt index 322481b94..e227c81b8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q18a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q18a/explain.txt @@ -144,7 +144,7 @@ +- CometScan parquet spark_catalog.default.item (133) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -156,7 +156,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22/explain.txt index 2810779ed..34d510fc7 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22/explain.txt @@ -25,7 +25,7 @@ TakeOrderedAndProject (24) +- CometScan parquet spark_catalog.default.warehouse (15) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct Input [3]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3] Condition : isnotnull(inv_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -65,7 +65,7 @@ Arguments: [inv_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3, d_date_sk#5] Arguments: [inv_item_sk#1, inv_quantity_on_hand#2], [inv_item_sk#1, inv_quantity_on_hand#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#7, i_brand#8, i_class#9, i_category#10, i_product_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -92,7 +92,7 @@ Arguments: [inv_quantity_on_hand#2, i_brand#8, i_class#9, i_category#10, i_produ (14) ColumnarToRow [codegen id : 2] Input [5]: [inv_quantity_on_hand#2, i_brand#8, i_class#9, i_category#10, i_product_name#11] -(15) Scan parquet spark_catalog.default.warehouse +(15) CometScan parquet spark_catalog.default.warehouse Output: [] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -149,7 +149,7 @@ BroadcastExchange (29) +- CometScan parquet spark_catalog.default.date_dim (25) -(25) Scan parquet spark_catalog.default.date_dim +(25) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22a/explain.txt index 90324f2cc..deae56511 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q22a/explain.txt @@ -47,7 +47,7 @@ +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -59,7 +59,7 @@ ReadSchema: struct= 15) AND (cnt#17 <= 20)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, 
d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35/explain.txt index 790000085..5933ea5fa 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35/explain.txt @@ -45,7 +45,7 @@ TakeOrderedAndProject (44) +- CometScan parquet spark_catalog.default.customer_demographics (35) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -56,14 +56,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -175,7 +175,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#21] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20, ca_state#21] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35a/explain.txt index 9f321ebda..c5bb66987 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35a/explain.txt +++ 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q35a/explain.txt @@ -39,7 +39,7 @@ +- CometScan parquet spark_catalog.default.customer_demographics (29) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -50,14 +50,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_qoy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -94,7 +94,7 @@ Left output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Right output [1]: [ss_customer_sk#4] Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight -(12) Scan parquet spark_catalog.default.web_sales +(12) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -113,7 +113,7 @@ Arguments: [ws_sold_date_sk#11], [d_date_sk#13], Inner, BuildRight Input [3]: [ws_bill_customer_sk#10, ws_sold_date_sk#11, d_date_sk#13] Arguments: [customsk#14], [ws_bill_customer_sk#10 AS customsk#14] -(16) Scan parquet spark_catalog.default.catalog_sales +(16) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#15, cs_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -149,7 +149,7 @@ Arguments: [c_customer_sk#1], [customsk#14], LeftSemi, BuildRight Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Arguments: [c_current_cdemo_sk#2, c_current_addr_sk#3], [c_current_cdemo_sk#2, c_current_addr_sk#3] -(24) Scan parquet spark_catalog.default.customer_address +(24) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -173,7 +173,7 @@ Arguments: [c_current_addr_sk#3], [ca_address_sk#20], Inner, BuildRight Input [4]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#20, ca_state#21] Arguments: [c_current_cdemo_sk#2, ca_state#21], [c_current_cdemo_sk#2, ca_state#21] -(29) Scan parquet spark_catalog.default.customer_demographics +(29) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -228,7 +228,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(39) Scan parquet spark_catalog.default.date_dim +(39) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_qoy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q36a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q36a/explain.txt index e1b4302ae..3dab08a14 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q36a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q36a/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- ReusedExchange (28) -(1) Scan parquet 
spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#4], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q49/explain.txt index 7d9198ed5..685f048de 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q49/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q49/explain.txt @@ -78,7 +78,7 @@ TakeOrderedAndProject (77) +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] -(5) Scan parquet spark_catalog.default.web_returns +(5) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -122,7 +122,7 @@ 
Arguments: [ws_order_number#2, ws_item_sk#1], [wr_order_number#9, wr_item_sk#8], Input [9]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11] Arguments: [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11], [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -195,7 +195,7 @@ Condition : ((return_rank#25 <= 10) OR (currency_rank#26 <= 10)) Output [5]: [web AS channel#27, item#22, return_ratio#23, return_rank#25, currency_rank#26] Input [5]: [item#22, return_ratio#23, currency_ratio#24, return_rank#25, currency_rank#26] -(27) Scan parquet spark_catalog.default.catalog_sales +(27) CometScan parquet spark_catalog.default.catalog_sales Output [6]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_net_profit#32, cs_sold_date_sk#33] Batched: true Location: InMemoryFileIndex [] @@ -215,7 +215,7 @@ Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, c Input [5]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] -(31) Scan parquet spark_catalog.default.catalog_returns +(31) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#35, cr_order_number#36, cr_return_quantity#37, cr_return_amount#38, cr_returned_date_sk#39] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -296,7 +296,7 @@ Condition : ((return_rank#50 <= 10) OR (currency_rank#51 <= 10)) Output [5]: [catalog AS channel#52, item#47, return_ratio#48, return_rank#50, currency_rank#51] Input [5]: [item#47, return_ratio#48, currency_ratio#49, return_rank#50, currency_rank#51] -(50) Scan parquet spark_catalog.default.store_sales +(50) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_net_profit#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -316,7 +316,7 @@ Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, Input [5]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] -(54) Scan parquet spark_catalog.default.store_returns +(54) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#60, sr_ticket_number#61, sr_return_quantity#62, sr_return_amt#63, sr_returned_date_sk#64] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -431,7 +431,7 @@ BroadcastExchange (82) +- CometScan parquet spark_catalog.default.date_dim (78) -(78) Scan parquet spark_catalog.default.date_dim +(78) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q51a/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q51a/explain.txt index 5d728a1c5..3211e46f6 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q51a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q51a/explain.txt @@ -73,7 +73,7 @@ TakeOrderedAndProject (72) +- ReusedExchange (62) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -85,7 +85,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -204,7 +204,7 @@ Arguments: hashpartitioning(item_sk#9, d_date#6, 5), ENSURE_REQUIREMENTS, [plan_ Input [3]: [item_sk#9, d_date#6, cume_sales#22] Arguments: [item_sk#9 ASC NULLS FIRST, d_date#6 ASC NULLS FIRST], false, 0 -(30) Scan parquet spark_catalog.default.store_sales +(30) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#23, ss_sales_price#24, ss_sold_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -406,7 +406,7 @@ BroadcastExchange (77) +- CometScan parquet spark_catalog.default.date_dim (73) -(73) Scan parquet spark_catalog.default.date_dim +(73) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q57/explain.txt index 97f571929..4b3a3c576 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q57/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q57/explain.txt @@ -48,7 +48,7 @@ TakeOrderedAndProject (47) +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#1, i_brand#2, i_category#3] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: [i_item_sk#1], [cs_item_sk#5], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [cs_sold_date_sk#7], 
[d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.call_center +(13) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#12, cc_name#13] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q5a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q5a/explain.txt index 02eab7ed1..3e1b88c26 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q5a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q5a/explain.txt @@ -84,7 +84,7 @@ TakeOrderedAndProject (83) +- ReusedExchange (74) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -100,7 +100,7 @@ Condition : isnotnull(ss_store_sk#1) Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -120,7 +120,7 @@ Arguments: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, n Child 0 Input [6]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11] Child 1 Input [6]: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, net_loss#21] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -148,7 +148,7 @@ Arguments: [date_sk#7], [d_date_sk#22], Inner, BuildRight Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22] Arguments: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11], [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11] -(14) Scan parquet spark_catalog.default.store +(14) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#24, s_store_id#25] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -191,7 +191,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt# Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#8))#30, 
sum(UnscaledValue(return_amt#10))#31, sum(UnscaledValue(profit#9))#32, sum(UnscaledValue(net_loss#11))#33] Results [5]: [store channel AS channel#34, concat(store, s_store_id#25) AS id#35, MakeDecimal(sum(UnscaledValue(sales_price#8))#30,17,2) AS sales#36, MakeDecimal(sum(UnscaledValue(return_amt#10))#31,17,2) AS returns#37, (MakeDecimal(sum(UnscaledValue(profit#9))#32,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#33,17,2)) AS profit#38] -(23) Scan parquet spark_catalog.default.catalog_sales +(23) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Batched: true Location: InMemoryFileIndex [] @@ -207,7 +207,7 @@ Condition : isnotnull(cs_catalog_page_sk#39) Input [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Arguments: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49], [cs_catalog_page_sk#39 AS page_sk#44, cs_sold_date_sk#42 AS date_sk#45, cs_ext_sales_price#40 AS sales_price#46, cs_net_profit#41 AS profit#47, 0.00 AS return_amt#48, 0.00 AS net_loss#49] -(26) Scan parquet spark_catalog.default.catalog_returns +(26) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_catalog_page_sk#50, cr_return_amount#51, cr_net_loss#52, cr_returned_date_sk#53] Batched: true Location: InMemoryFileIndex [] @@ -239,7 +239,7 @@ Arguments: [date_sk#45], [d_date_sk#60], Inner, BuildRight Input [7]: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49, d_date_sk#60] Arguments: [page_sk#44, sales_price#46, profit#47, return_amt#48, net_loss#49], [page_sk#44, sales_price#46, profit#47, return_amt#48, net_loss#49] -(33) Scan parquet spark_catalog.default.catalog_page +(33) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#61, cp_catalog_page_id#62] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -282,7 +282,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#46)), sum(UnscaledValue(return_amt Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#46))#67, sum(UnscaledValue(return_amt#48))#68, sum(UnscaledValue(profit#47))#69, sum(UnscaledValue(net_loss#49))#70] Results [5]: [catalog channel AS channel#71, concat(catalog_page, cp_catalog_page_id#62) AS id#72, MakeDecimal(sum(UnscaledValue(sales_price#46))#67,17,2) AS sales#73, MakeDecimal(sum(UnscaledValue(return_amt#48))#68,17,2) AS returns#74, (MakeDecimal(sum(UnscaledValue(profit#47))#69,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#49))#70,17,2)) AS profit#75] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Batched: true Location: InMemoryFileIndex [] @@ -298,7 +298,7 @@ Condition : isnotnull(ws_web_site_sk#76) Input [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Arguments: [wsr_web_site_sk#81, date_sk#82, sales_price#83, profit#84, return_amt#85, net_loss#86], [ws_web_site_sk#76 AS wsr_web_site_sk#81, ws_sold_date_sk#79 AS date_sk#82, ws_ext_sales_price#77 AS sales_price#83, ws_net_profit#78 AS profit#84, 0.00 AS return_amt#85, 0.00 AS net_loss#86] -(45) Scan parquet spark_catalog.default.web_returns +(45) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#87, wr_order_number#88, wr_return_amt#89, wr_net_loss#90, wr_returned_date_sk#91] Batched: true 
Location: InMemoryFileIndex [] @@ -309,7 +309,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#1, ca_state#2] Condition : isnotnull(ca_address_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#3, c_current_addr_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -74,7 +74,7 @@ Arguments: [ca_address_sk#1], [c_current_addr_sk#4], Inner, BuildRight Input [4]: [ca_address_sk#1, ca_state#2, c_customer_sk#3, c_current_addr_sk#4] Arguments: [ca_state#2, c_customer_sk#3], [ca_state#2, c_customer_sk#3] -(8) Scan parquet spark_catalog.default.store_sales +(8) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -99,7 +99,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], Inner, BuildRight Input [5]: [ca_state#2, c_customer_sk#3, ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Arguments: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7], [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -127,7 +127,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [4]: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7, d_date_sk#9] Arguments: [ca_state#2, ss_item_sk#5], [ca_state#2, ss_item_sk#5] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#13, i_current_price#14, i_category#15] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -138,7 +138,7 @@ ReadSchema: struct Input [3]: [i_item_sk#13, i_current_price#14, i_category#15] Condition : ((isnotnull(i_current_price#14) AND isnotnull(i_category#15)) AND isnotnull(i_item_sk#13)) -(21) Scan parquet spark_catalog.default.item +(21) CometScan parquet spark_catalog.default.item Output [2]: [i_current_price#16, i_category#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -228,7 +228,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(39) Scan parquet spark_catalog.default.date_dim +(39) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -260,7 +260,7 @@ Subquery:2 Hosting operator id = 40 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (44) -(44) Scan parquet spark_catalog.default.date_dim +(44) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#24, d_year#25, d_moy#26] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q64/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q64/explain.txt index 77a8f008c..4d8ac469c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q64/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q64/explain.txt @@ -174,7 +174,7 @@ +- ReusedExchange (162) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [12]: [ss_item_sk#1, 
ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -190,7 +190,7 @@ Condition : (((((((isnotnull(ss_item_sk#1) AND isnotnull(ss_ticket_number#8)) AN Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#14, sr_ticket_number#15, sr_returned_date_sk#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -222,7 +222,7 @@ Arguments: hashpartitioning(ss_item_sk#1, 5), ENSURE_REQUIREMENTS, CometNativeSh Input [11]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12], [ss_item_sk#1 ASC NULLS FIRST] -(11) Scan parquet spark_catalog.default.catalog_sales +(11) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19, cs_sold_date_sk#20] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -245,7 +245,7 @@ Arguments: hashpartitioning(cs_item_sk#17, cs_order_number#18, 5), ENSURE_REQUIR Input [3]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19] Arguments: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19], [cs_item_sk#17 ASC NULLS FIRST, cs_order_number#18 ASC NULLS FIRST] -(16) Scan parquet spark_catalog.default.catalog_returns +(16) CometScan parquet spark_catalog.default.catalog_returns Output [6]: [cr_item_sk#21, cr_order_number#22, cr_refunded_cash#23, cr_reversed_charge#24, cr_store_credit#25, cr_returned_date_sk#26] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -312,7 +312,7 @@ Arguments: [ss_item_sk#1], [cs_item_sk#17], Inner Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12, cs_item_sk#17] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12], [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] -(31) Scan parquet spark_catalog.default.date_dim +(31) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#32, d_year#33] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -336,7 +336,7 @@ Arguments: [ss_sold_date_sk#12], [d_date_sk#32], Inner, BuildRight Input [13]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, 
ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12, d_date_sk#32, d_year#33] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33], [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33] -(36) Scan parquet spark_catalog.default.store +(36) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#34, s_store_name#35, s_zip#36] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -360,7 +360,7 @@ Arguments: [ss_store_sk#6], [s_store_sk#34], Inner, BuildRight Input [14]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_sk#34, s_store_name#35, s_zip#36] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36], [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36] -(41) Scan parquet spark_catalog.default.customer +(41) CometScan parquet spark_catalog.default.customer Output [6]: [c_customer_sk#37, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -384,7 +384,7 @@ Arguments: [ss_customer_sk#2], [c_customer_sk#37], Inner, BuildRight Input [18]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_customer_sk#37, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42] Arguments: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42], [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42] -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_year#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -420,7 +420,7 @@ Arguments: [c_first_shipto_date_sk#41], [d_date_sk#45], Inner, BuildRight Input [18]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, d_year#44, d_date_sk#45, d_year#46] Arguments: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, 
s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46], [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46] -(54) Scan parquet spark_catalog.default.customer_demographics +(54) CometScan parquet spark_catalog.default.customer_demographics Output [2]: [cd_demo_sk#47, cd_marital_status#48] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -456,7 +456,7 @@ Arguments: [c_current_cdemo_sk#38], [cd_demo_sk#49], Inner, NOT (cd_marital_stat Input [18]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46, cd_marital_status#48, cd_demo_sk#49, cd_marital_status#50] Arguments: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46], [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46] -(62) Scan parquet spark_catalog.default.promotion +(62) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#51] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -480,7 +480,7 @@ Arguments: [ss_promo_sk#7], [p_promo_sk#51], Inner, BuildRight Input [15]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46, p_promo_sk#51] Arguments: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46], [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46] -(67) Scan parquet spark_catalog.default.household_demographics +(67) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#52, hd_income_band_sk#53] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -516,7 +516,7 @@ Arguments: [c_current_hdemo_sk#39], [hd_demo_sk#54], Inner, BuildRight Input [15]: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_demo_sk#54, hd_income_band_sk#55] Arguments: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55], [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55] 
-(75) Scan parquet spark_catalog.default.customer_address +(75) CometScan parquet spark_catalog.default.customer_address Output [5]: [ca_address_sk#56, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -552,7 +552,7 @@ Arguments: [c_current_addr_sk#40], [ca_address_sk#61], Inner, BuildRight Input [21]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_address_sk#61, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] Arguments: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65], [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] -(83) Scan parquet spark_catalog.default.income_band +(83) CometScan parquet spark_catalog.default.income_band Output [1]: [ib_income_band_sk#66] Batched: true Location [not included in comparison]/{warehouse_dir}/income_band] @@ -588,7 +588,7 @@ Arguments: [hd_income_band_sk#55], [ib_income_band_sk#67], Inner, BuildRight Input [19]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ib_income_band_sk#67] Arguments: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65], [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] -(91) Scan parquet spark_catalog.default.item +(91) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#68, i_current_price#69, i_color#70, i_product_name#71] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -634,7 +634,7 @@ Arguments: hashpartitioning(item_sk#77, store_name#78, store_zip#79, 5), ENSURE_ Input [17]: [product_name#76, item_sk#77, store_name#78, store_zip#79, b_street_number#80, b_streen_name#81, b_city#82, b_zip#83, c_street_number#84, c_street_name#85, c_city#86, c_zip#87, syear#88, cnt#89, s1#90, s2#91, s3#92] Arguments: [product_name#76, item_sk#77, store_name#78, store_zip#79, b_street_number#80, b_streen_name#81, b_city#82, b_zip#83, c_street_number#84, c_street_name#85, c_city#86, c_zip#87, syear#88, cnt#89, s1#90, s2#91, s3#92], [item_sk#77 ASC NULLS FIRST, store_name#78 ASC NULLS FIRST, store_zip#79 ASC NULLS FIRST] -(101) Scan parquet spark_catalog.default.store_sales +(101) CometScan parquet spark_catalog.default.store_sales Output [12]: 
[ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_ticket_number#100, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] Batched: true Location: InMemoryFileIndex [] @@ -650,7 +650,7 @@ Condition : (((((((isnotnull(ss_item_sk#93) AND isnotnull(ss_ticket_number#100)) Input [12]: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_ticket_number#100, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] Arguments: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_ticket_number#100, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] -(104) Scan parquet spark_catalog.default.store_returns +(104) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#106, sr_ticket_number#107, sr_returned_date_sk#108] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -711,7 +711,7 @@ Arguments: [ss_item_sk#93], [cs_item_sk#109], Inner Input [12]: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104, cs_item_sk#109] Arguments: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104], [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] -(118) Scan parquet spark_catalog.default.date_dim +(118) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#117, d_year#118] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -950,7 +950,7 @@ BroadcastExchange (177) +- CometScan parquet spark_catalog.default.date_dim (174) -(174) Scan parquet spark_catalog.default.date_dim +(174) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#32, d_year#33] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -975,7 +975,7 @@ BroadcastExchange (181) +- CometScan parquet spark_catalog.default.date_dim (178) -(178) Scan parquet spark_catalog.default.date_dim +(178) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#117, d_year#118] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q67a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q67a/explain.txt index 5ae73a0f7..c866b93d2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q67a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q67a/explain.txt @@ -72,7 +72,7 @@ TakeOrderedAndProject (71) +- ReusedExchange (57) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_quantity#3, ss_sales_price#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : 
isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -102,7 +102,7 @@ Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] (9) ColumnarToRow [codegen id : 4] Input [2]: [ss_store_sk#1, ss_net_profit#2] -(10) Scan parquet spark_catalog.default.store +(10) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#7, s_county#8, s_state#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -116,7 +116,7 @@ Condition : isnotnull(s_store_sk#7) (12) ColumnarToRow [codegen id : 3] Input [3]: [s_store_sk#7, s_county#8, s_state#9] -(13) Scan parquet spark_catalog.default.store_sales +(13) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -128,7 +128,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Condition : isnotnull(ss_store_sk#10) -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#14, s_state#15] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -349,7 +349,7 @@ BroadcastExchange (63) +- CometScan parquet spark_catalog.default.date_dim (59) -(59) Scan parquet spark_catalog.default.date_dim +(59) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q72/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q72/explain.txt index 8ac4aab45..45f7e2e66 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q72/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q72/explain.txt @@ -65,7 +65,7 @@ +- CometScan parquet spark_catalog.default.catalog_returns (53) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [8]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, cs_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -77,7 +77,7 @@ ReadSchema: struct date_add(d_d Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#28, d_date#29] Arguments: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] -(46) Scan parquet spark_catalog.default.promotion +(46) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#30] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -314,7 +314,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Arguments: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST] -(53) Scan parquet 
spark_catalog.default.catalog_returns +(53) CometScan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#31, cr_order_number#32, cr_returned_date_sk#33] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -377,7 +377,7 @@ BroadcastExchange (69) +- CometScan parquet spark_catalog.default.date_dim (65) -(65) Scan parquet spark_catalog.default.date_dim +(65) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q74/explain.txt index 4072b2277..12d5134a0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q74/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q74/explain.txt @@ -69,7 +69,7 @@ +- ReusedExchange (58) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -80,7 +80,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#14, c_customer_id#15, c_first_name#16, c_last_name#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -158,7 +158,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -77,7 +77,7 @@ Arguments: [ws_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5] Arguments: [ws_item_sk#1, ws_net_paid#2], [ws_item_sk#1, ws_net_paid#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#7, i_class#8, i_category#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -211,7 +211,7 @@ BroadcastExchange (41) +- CometScan parquet spark_catalog.default.date_dim (37) -(37) Scan parquet spark_catalog.default.date_dim +(37) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q98/explain.txt index 9eba711c1..4d870a8b8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q98/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark3_5/q98/explain.txt @@ -25,7 +25,7 @@ +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, 
ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -61,7 +61,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -143,7 +143,7 @@ BroadcastExchange (29) +- CometScan parquet spark_catalog.default.date_dim (25) -(25) Scan parquet spark_catalog.default.date_dim +(25) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q10a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q10a/explain.txt index fc449ff52..868b2f482 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q10a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q10a/explain.txt @@ -40,7 +40,7 @@ +- CometScan parquet spark_catalog.default.customer_demographics (30) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -51,14 +51,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -95,7 +95,7 @@ Left output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Right output [1]: [ss_customer_sk#4] Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight -(12) Scan parquet spark_catalog.default.web_sales +(12) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -114,7 +114,7 @@ Arguments: [ws_sold_date_sk#11], [d_date_sk#13], Inner, BuildRight Input [3]: [ws_bill_customer_sk#10, ws_sold_date_sk#11, d_date_sk#13] Arguments: [customer_sk#14], [ws_bill_customer_sk#10 AS customer_sk#14] -(16) Scan parquet spark_catalog.default.catalog_sales +(16) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#15, cs_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -150,7 +150,7 @@ Arguments: [c_customer_sk#1], [customer_sk#14], LeftSemi, BuildRight Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Arguments: [c_current_cdemo_sk#2, c_current_addr_sk#3], [c_current_cdemo_sk#2, c_current_addr_sk#3] -(24) Scan parquet 
spark_catalog.default.customer_address +(24) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_county#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -178,7 +178,7 @@ Arguments: [c_current_addr_sk#3], [ca_address_sk#20], Inner, BuildRight Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#20] Arguments: [c_current_cdemo_sk#2], [c_current_cdemo_sk#2] -(30) Scan parquet spark_catalog.default.customer_demographics +(30) CometScan parquet spark_catalog.default.customer_demographics Output [9]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_education_status#25, cd_purchase_estimate#26, cd_credit_rating#27, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -233,7 +233,7 @@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(40) Scan parquet spark_catalog.default.date_dim +(40) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q11/explain.txt index 2b4fa79cd..6f50d7fbd 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q11/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q11/explain.txt @@ -72,7 +72,7 @@ TakeOrderedAndProject (71) +- ReusedExchange (61) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -83,7 +83,7 @@ ReadSchema: struct 0.00)) -(18) Scan parquet spark_catalog.default.customer +(18) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#21, c_customer_id#22, c_first_name#23, c_last_name#24, c_preferred_cust_flag#25, c_birth_country#26, c_login#27, c_email_address#28] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -168,7 +168,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -138,7 +138,7 @@ 
BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14/explain.txt index 5c0b28c77..143f2c1b5 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14/explain.txt @@ -87,7 +87,7 @@ TakeOrderedAndProject (86) +- CometScan parquet spark_catalog.default.date_dim (73) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -99,7 +99,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -110,7 +110,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -133,7 +133,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -169,7 +169,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -245,7 +245,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -312,7 +312,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -344,7 +344,7 @@ 
Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -397,7 +397,7 @@ Results [6]: [store AS channel#55, i_brand_id#40, i_class_id#41, i_category_id#4 Input [6]: [channel#55, i_brand_id#40, i_class_id#41, i_category_id#42, sales#56, number_sales#57] Condition : (isnotnull(sales#56) AND (cast(sales#56 as decimal(32,6)) > cast(Subquery scalar-subquery#58, [id=#59] as decimal(32,6)))) -(66) Scan parquet spark_catalog.default.store_sales +(66) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#60, ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63] Batched: true Location: InMemoryFileIndex [] @@ -429,7 +429,7 @@ Arguments: [ss_item_sk#60], [i_item_sk#66], Inner, BuildRight Input [8]: [ss_item_sk#60, ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63, i_item_sk#66, i_brand_id#67, i_class_id#68, i_category_id#69] Arguments: [ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63, i_brand_id#67, i_class_id#68, i_category_id#69], [ss_quantity#61, ss_list_price#62, ss_sold_date_sk#63, i_brand_id#67, i_class_id#68, i_category_id#69] -(73) Scan parquet spark_catalog.default.date_dim +(73) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#70, d_week_seq#71] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -518,7 +518,7 @@ Subquery:1 Hosting operator id = 65 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (96) -(87) Scan parquet spark_catalog.default.store_sales +(87) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#85, ss_list_price#86, ss_sold_date_sk#87] Batched: true Location: InMemoryFileIndex [] @@ -537,7 +537,7 @@ Arguments: [ss_sold_date_sk#87], [d_date_sk#89], Inner, BuildRight Input [4]: [ss_quantity#85, ss_list_price#86, ss_sold_date_sk#87, d_date_sk#89] Arguments: [quantity#90, list_price#91], [ss_quantity#85 AS quantity#90, ss_list_price#86 AS list_price#91] -(91) Scan parquet spark_catalog.default.catalog_sales +(91) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#92, cs_list_price#93, cs_sold_date_sk#94] Batched: true Location: InMemoryFileIndex [] @@ -556,7 +556,7 @@ Arguments: [cs_sold_date_sk#94], [d_date_sk#96], Inner, BuildRight Input [4]: [cs_quantity#92, cs_list_price#93, cs_sold_date_sk#94, d_date_sk#96] Arguments: [quantity#97, list_price#98], [cs_quantity#92 AS quantity#97, cs_list_price#93 AS list_price#98] -(95) Scan parquet spark_catalog.default.web_sales +(95) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#99, ws_list_price#100, ws_sold_date_sk#101] Batched: true Location: InMemoryFileIndex [] @@ -615,7 +615,7 @@ BroadcastExchange (108) +- CometScan parquet spark_catalog.default.date_dim (104) -(104) Scan parquet spark_catalog.default.date_dim +(104) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ 
-646,7 +646,7 @@ Subquery:7 Hosting operator id = 104 Hosting Expression = Subquery scalar-subque +- CometScan parquet spark_catalog.default.date_dim (109) -(109) Scan parquet spark_catalog.default.date_dim +(109) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#112, d_year#113, d_moy#114, d_dom#115] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -672,7 +672,7 @@ BroadcastExchange (117) +- CometScan parquet spark_catalog.default.date_dim (113) -(113) Scan parquet spark_catalog.default.date_dim +(113) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#116] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -712,7 +712,7 @@ BroadcastExchange (122) +- CometScan parquet spark_catalog.default.date_dim (118) -(118) Scan parquet spark_catalog.default.date_dim +(118) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#70, d_week_seq#71] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -743,7 +743,7 @@ Subquery:16 Hosting operator id = 118 Hosting Expression = Subquery scalar-subqu +- CometScan parquet spark_catalog.default.date_dim (123) -(123) Scan parquet spark_catalog.default.date_dim +(123) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#117, d_year#118, d_moy#119, d_dom#120] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14a/explain.txt index cd6293c6f..14e2e96b2 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q14a/explain.txt @@ -125,7 +125,7 @@ TakeOrderedAndProject (124) +- ReusedExchange (115) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -137,7 +137,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -148,7 +148,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -171,7 +171,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -207,7 +207,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, 
i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -283,7 +283,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -350,7 +350,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -382,7 +382,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -435,7 +435,7 @@ Results [6]: [store AS channel#54, i_brand_id#40, i_class_id#41, i_category_id#4 Input [6]: [channel#54, i_brand_id#40, i_class_id#41, i_category_id#42, sales#55, number_sales#56] Condition : (isnotnull(sales#55) AND (cast(sales#55 as decimal(32,6)) > cast(Subquery scalar-subquery#57, [id=#58] as decimal(32,6)))) -(66) Scan parquet spark_catalog.default.catalog_sales +(66) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#59, cs_quantity#60, cs_list_price#61, cs_sold_date_sk#62] Batched: true Location: InMemoryFileIndex [] @@ -504,7 +504,7 @@ Results [6]: [catalog AS channel#78, i_brand_id#66, i_class_id#67, i_category_id Input [6]: [channel#78, i_brand_id#66, i_class_id#67, i_category_id#68, sales#79, number_sales#80] Condition : (isnotnull(sales#79) AND (cast(sales#79 as decimal(32,6)) > cast(ReusedSubquery Subquery scalar-subquery#57, [id=#58] as decimal(32,6)))) -(81) Scan parquet spark_catalog.default.web_sales +(81) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#81, ws_quantity#82, ws_list_price#83, ws_sold_date_sk#84] Batched: true Location: InMemoryFileIndex [] @@ -754,7 +754,7 @@ Subquery:1 Hosting operator id = 65 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (137) -(125) Scan parquet spark_catalog.default.store_sales +(125) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#207, ss_list_price#208, ss_sold_date_sk#209] Batched: true Location: InMemoryFileIndex [] @@ -773,14 +773,14 @@ Arguments: [ss_sold_date_sk#209], [d_date_sk#211], Inner, BuildRight Input [4]: [ss_quantity#207, ss_list_price#208, ss_sold_date_sk#209, d_date_sk#211] Arguments: [quantity#212, list_price#213], [ss_quantity#207 AS quantity#212, 
ss_list_price#208 AS list_price#213] -(129) Scan parquet spark_catalog.default.catalog_sales +(129) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#214, cs_list_price#215, cs_sold_date_sk#216] Batched: true Location: InMemoryFileIndex [] PartitionFilters: [isnotnull(cs_sold_date_sk#216), dynamicpruningexpression(cs_sold_date_sk#216 IN dynamicpruning#217)] ReadSchema: struct -(130) Scan parquet spark_catalog.default.date_dim +(130) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#218, d_year#219] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -808,7 +808,7 @@ Arguments: [cs_sold_date_sk#216], [d_date_sk#218], Inner, BuildRight Input [4]: [cs_quantity#214, cs_list_price#215, cs_sold_date_sk#216, d_date_sk#218] Arguments: [quantity#220, list_price#221], [cs_quantity#214 AS quantity#220, cs_list_price#215 AS list_price#221] -(136) Scan parquet spark_catalog.default.web_sales +(136) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#222, ws_list_price#223, ws_sold_date_sk#224] Batched: true Location: InMemoryFileIndex [] @@ -863,7 +863,7 @@ BroadcastExchange (149) +- CometScan parquet spark_catalog.default.date_dim (145) -(145) Scan parquet spark_catalog.default.date_dim +(145) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#218, d_year#219] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -895,7 +895,7 @@ BroadcastExchange (154) +- CometScan parquet spark_catalog.default.date_dim (150) -(150) Scan parquet spark_catalog.default.date_dim +(150) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -925,7 +925,7 @@ BroadcastExchange (159) +- CometScan parquet spark_catalog.default.date_dim (155) -(155) Scan parquet spark_catalog.default.date_dim +(155) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#235] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q18a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q18a/explain.txt index 36ccc06de..9aea34f5e 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q18a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q18a/explain.txt @@ -148,7 +148,7 @@ TakeOrderedAndProject (147) +- CometScan parquet spark_catalog.default.item (137) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -160,7 +160,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, 
i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -138,7 +138,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22/explain.txt index 2810779ed..34d510fc7 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22/explain.txt @@ -25,7 +25,7 @@ TakeOrderedAndProject (24) +- CometScan parquet spark_catalog.default.warehouse (15) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct Input [3]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3] Condition : isnotnull(inv_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -65,7 +65,7 @@ Arguments: [inv_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3, d_date_sk#5] Arguments: [inv_item_sk#1, inv_quantity_on_hand#2], [inv_item_sk#1, inv_quantity_on_hand#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#7, i_brand#8, i_class#9, i_category#10, i_product_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -92,7 +92,7 @@ Arguments: [inv_quantity_on_hand#2, i_brand#8, i_class#9, i_category#10, i_produ (14) ColumnarToRow [codegen id : 2] Input [5]: [inv_quantity_on_hand#2, i_brand#8, i_class#9, i_category#10, i_product_name#11] -(15) Scan parquet spark_catalog.default.warehouse +(15) CometScan parquet spark_catalog.default.warehouse Output: [] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -149,7 +149,7 @@ BroadcastExchange (29) +- CometScan parquet spark_catalog.default.date_dim (25) -(25) Scan parquet spark_catalog.default.date_dim +(25) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22a/explain.txt index ac15878d5..4cabf9877 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22a/explain.txt 
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q22a/explain.txt @@ -47,7 +47,7 @@ TakeOrderedAndProject (46) +- ReusedExchange (40) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -59,7 +59,7 @@ ReadSchema: struct= 15) AND (cnt#17 <= 20)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt index 790000085..5933ea5fa 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt @@ -45,7 +45,7 @@ TakeOrderedAndProject (44) +- CometScan parquet spark_catalog.default.customer_demographics (35) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -56,14 +56,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -175,7 +175,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#21] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20, ca_state#21] -(35) Scan parquet 
spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt index 8eafc64ff..df30176f0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt @@ -39,7 +39,7 @@ TakeOrderedAndProject (38) +- CometScan parquet spark_catalog.default.customer_demographics (29) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -50,14 +50,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_qoy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -94,7 +94,7 @@ Left output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Right output [1]: [ss_customer_sk#4] Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight -(12) Scan parquet spark_catalog.default.web_sales +(12) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -113,7 +113,7 @@ Arguments: [ws_sold_date_sk#11], [d_date_sk#13], Inner, BuildRight Input [3]: [ws_bill_customer_sk#10, ws_sold_date_sk#11, d_date_sk#13] Arguments: [customsk#14], [ws_bill_customer_sk#10 AS customsk#14] -(16) Scan parquet spark_catalog.default.catalog_sales +(16) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#15, cs_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -149,7 +149,7 @@ Arguments: [c_customer_sk#1], [customsk#14], LeftSemi, BuildRight Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Arguments: [c_current_cdemo_sk#2, c_current_addr_sk#3], [c_current_cdemo_sk#2, c_current_addr_sk#3] -(24) Scan parquet spark_catalog.default.customer_address +(24) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -173,7 +173,7 @@ Arguments: [c_current_addr_sk#3], [ca_address_sk#20], Inner, BuildRight Input [4]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#20, ca_state#21] Arguments: [c_current_cdemo_sk#2, ca_state#21], [c_current_cdemo_sk#2, ca_state#21] -(29) Scan parquet spark_catalog.default.customer_demographics +(29) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, 
cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -232,7 +232,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(39) Scan parquet spark_catalog.default.date_dim +(39) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_qoy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt index 7b3ae5c67..17aa8a635 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- ReusedExchange (29) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -82,7 +82,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#4], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -106,7 +106,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -261,7 +261,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q49/explain.txt index 01c4c5583..138876bfc 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q49/explain.txt +++ 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q49/explain.txt @@ -78,7 +78,7 @@ TakeOrderedAndProject (77) +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] -(5) Scan parquet spark_catalog.default.web_returns +(5) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -122,7 +122,7 @@ Arguments: [ws_order_number#2, ws_item_sk#1], [wr_order_number#9, wr_item_sk#8], Input [9]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11] Arguments: [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11], [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -199,7 +199,7 @@ Condition : ((return_rank#35 <= 10) OR (currency_rank#36 <= 10)) Output [5]: [web AS channel#37, item#32, return_ratio#33, return_rank#35, currency_rank#36] Input [5]: [item#32, return_ratio#33, currency_ratio#34, return_rank#35, currency_rank#36] -(27) Scan parquet spark_catalog.default.catalog_sales +(27) CometScan parquet spark_catalog.default.catalog_sales Output [6]: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, cs_net_profit#42, cs_sold_date_sk#43] Batched: true Location: InMemoryFileIndex [] @@ -219,7 +219,7 @@ Arguments: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, c Input [5]: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, cs_sold_date_sk#43] Arguments: [cs_item_sk#38, cs_order_number#39, cs_quantity#40, cs_net_paid#41, cs_sold_date_sk#43] -(31) Scan parquet spark_catalog.default.catalog_returns +(31) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#45, cr_order_number#46, cr_return_quantity#47, cr_return_amount#48, cr_returned_date_sk#49] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -304,7 +304,7 @@ Condition : ((return_rank#70 <= 10) OR (currency_rank#71 <= 10)) Output [5]: [catalog AS channel#72, item#67, return_ratio#68, return_rank#70, currency_rank#71] Input [5]: [item#67, return_ratio#68, currency_ratio#69, return_rank#70, currency_rank#71] -(50) Scan parquet spark_catalog.default.store_sales +(50) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, ss_net_profit#77, ss_sold_date_sk#78] Batched: true Location: InMemoryFileIndex [] @@ -324,7 +324,7 @@ Arguments: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, Input 
[5]: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, ss_sold_date_sk#78] Arguments: [ss_item_sk#73, ss_ticket_number#74, ss_quantity#75, ss_net_paid#76, ss_sold_date_sk#78] -(54) Scan parquet spark_catalog.default.store_returns +(54) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#80, sr_ticket_number#81, sr_return_quantity#82, sr_return_amt#83, sr_returned_date_sk#84] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -443,7 +443,7 @@ BroadcastExchange (82) +- CometScan parquet spark_catalog.default.date_dim (78) -(78) Scan parquet spark_catalog.default.date_dim +(78) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q51a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q51a/explain.txt index a5ad8c293..af53d08cc 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q51a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q51a/explain.txt @@ -71,7 +71,7 @@ TakeOrderedAndProject (70) +- ReusedExchange (60) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -83,7 +83,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -203,7 +203,7 @@ Arguments: hashpartitioning(item_sk#11, d_date#6, 5), ENSURE_REQUIREMENTS, [plan Input [3]: [item_sk#11, d_date#6, cume_sales#24] Arguments: [item_sk#11 ASC NULLS FIRST, d_date#6 ASC NULLS FIRST], false, 0 -(29) Scan parquet spark_catalog.default.store_sales +(29) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#25, ss_sales_price#26, ss_sold_date_sk#27] Batched: true Location: InMemoryFileIndex [] @@ -406,7 +406,7 @@ BroadcastExchange (75) +- CometScan parquet spark_catalog.default.date_dim (71) -(71) Scan parquet spark_catalog.default.date_dim +(71) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt index e361df2d1..ad7a09b49 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt @@ -46,7 +46,7 @@ TakeOrderedAndProject (45) +- ReusedExchange (38) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#1, i_brand#2, i_category#3] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -57,7 +57,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND 
isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -82,7 +82,7 @@ Arguments: [i_item_sk#1], [cs_item_sk#5], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -106,7 +106,7 @@ Arguments: [cs_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.call_center +(13) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#12, cc_name#13] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] @@ -261,7 +261,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt index 9d660d776..4c45b9d4d 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt @@ -84,7 +84,7 @@ TakeOrderedAndProject (83) +- ReusedExchange (74) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -100,7 +100,7 @@ Condition : isnotnull(ss_store_sk#1) Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -120,7 +120,7 @@ Arguments: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, n Child 0 Input [6]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11] Child 1 Input [6]: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, net_loss#21] -(8) 
Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -148,7 +148,7 @@ Arguments: [date_sk#7], [d_date_sk#22], Inner, BuildRight Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22] Arguments: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11], [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11] -(14) Scan parquet spark_catalog.default.store +(14) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#24, s_store_id#25] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -193,7 +193,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt# Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#8))#34, sum(UnscaledValue(return_amt#10))#35, sum(UnscaledValue(profit#9))#36, sum(UnscaledValue(net_loss#11))#37] Results [5]: [store channel AS channel#38, concat(store, s_store_id#25) AS id#39, MakeDecimal(sum(UnscaledValue(sales_price#8))#34,17,2) AS sales#40, MakeDecimal(sum(UnscaledValue(return_amt#10))#35,17,2) AS returns#41, (MakeDecimal(sum(UnscaledValue(profit#9))#36,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#37,17,2)) AS profit#42] -(23) Scan parquet spark_catalog.default.catalog_sales +(23) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_catalog_page_sk#43, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46] Batched: true Location: InMemoryFileIndex [] @@ -209,7 +209,7 @@ Condition : isnotnull(cs_catalog_page_sk#43) Input [4]: [cs_catalog_page_sk#43, cs_ext_sales_price#44, cs_net_profit#45, cs_sold_date_sk#46] Arguments: [page_sk#48, date_sk#49, sales_price#50, profit#51, return_amt#52, net_loss#53], [cs_catalog_page_sk#43 AS page_sk#48, cs_sold_date_sk#46 AS date_sk#49, cs_ext_sales_price#44 AS sales_price#50, cs_net_profit#45 AS profit#51, 0.00 AS return_amt#52, 0.00 AS net_loss#53] -(26) Scan parquet spark_catalog.default.catalog_returns +(26) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_catalog_page_sk#54, cr_return_amount#55, cr_net_loss#56, cr_returned_date_sk#57] Batched: true Location: InMemoryFileIndex [] @@ -241,7 +241,7 @@ Arguments: [date_sk#49], [d_date_sk#64], Inner, BuildRight Input [7]: [page_sk#48, date_sk#49, sales_price#50, profit#51, return_amt#52, net_loss#53, d_date_sk#64] Arguments: [page_sk#48, sales_price#50, profit#51, return_amt#52, net_loss#53], [page_sk#48, sales_price#50, profit#51, return_amt#52, net_loss#53] -(33) Scan parquet spark_catalog.default.catalog_page +(33) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#65, cp_catalog_page_id#66] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -286,7 +286,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#50)), sum(UnscaledValue(return_amt Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#50))#75, sum(UnscaledValue(return_amt#52))#76, sum(UnscaledValue(profit#51))#77, sum(UnscaledValue(net_loss#53))#78] Results [5]: [catalog channel AS channel#79, concat(catalog_page, cp_catalog_page_id#66) AS id#80, MakeDecimal(sum(UnscaledValue(sales_price#50))#75,17,2) AS sales#81, MakeDecimal(sum(UnscaledValue(return_amt#52))#76,17,2) AS returns#82, (MakeDecimal(sum(UnscaledValue(profit#51))#77,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#53))#78,17,2)) AS profit#83] -(42) 
Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_web_site_sk#84, ws_ext_sales_price#85, ws_net_profit#86, ws_sold_date_sk#87] Batched: true Location: InMemoryFileIndex [] @@ -302,7 +302,7 @@ Condition : isnotnull(ws_web_site_sk#84) Input [4]: [ws_web_site_sk#84, ws_ext_sales_price#85, ws_net_profit#86, ws_sold_date_sk#87] Arguments: [wsr_web_site_sk#89, date_sk#90, sales_price#91, profit#92, return_amt#93, net_loss#94], [ws_web_site_sk#84 AS wsr_web_site_sk#89, ws_sold_date_sk#87 AS date_sk#90, ws_ext_sales_price#85 AS sales_price#91, ws_net_profit#86 AS profit#92, 0.00 AS return_amt#93, 0.00 AS net_loss#94] -(45) Scan parquet spark_catalog.default.web_returns +(45) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#95, wr_order_number#96, wr_return_amt#97, wr_net_loss#98, wr_returned_date_sk#99] Batched: true Location: InMemoryFileIndex [] @@ -313,7 +313,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#1, ca_state#2] Condition : isnotnull(ca_address_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#3, c_current_addr_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -76,7 +76,7 @@ Arguments: [ca_address_sk#1], [c_current_addr_sk#4], Inner, BuildRight Input [4]: [ca_address_sk#1, ca_state#2, c_customer_sk#3, c_current_addr_sk#4] Arguments: [ca_state#2, c_customer_sk#3], [ca_state#2, c_customer_sk#3] -(8) Scan parquet spark_catalog.default.store_sales +(8) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -101,7 +101,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], Inner, BuildRight Input [5]: [ca_state#2, c_customer_sk#3, ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Arguments: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7], [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -132,7 +132,7 @@ Arguments: [ca_state#2, ss_item_sk#5], [ca_state#2, ss_item_sk#5] (19) ColumnarToRow [codegen id : 4] Input [2]: [ca_state#2, ss_item_sk#5] -(20) Scan parquet spark_catalog.default.item +(20) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#13, i_current_price#14, i_category#15] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -146,7 +146,7 @@ Condition : ((isnotnull(i_current_price#14) AND isnotnull(i_category#15)) AND is (22) ColumnarToRow [codegen id : 3] Input [3]: [i_item_sk#13, i_current_price#14, i_category#15] -(23) Scan parquet spark_catalog.default.item +(23) CometScan parquet spark_catalog.default.item Output [2]: [i_current_price#16, i_category#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -246,7 +246,7 @@ BroadcastExchange (45) +- CometScan parquet spark_catalog.default.date_dim (41) -(41) Scan parquet spark_catalog.default.date_dim +(41) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -280,7 +280,7 @@ Subquery:3 Hosting operator id = 41 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim 
(46) -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#29, d_year#30, d_moy#31] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt index f144287f2..37e231910 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt @@ -182,7 +182,7 @@ +- ReusedExchange (170) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -198,7 +198,7 @@ Condition : (((((((isnotnull(ss_item_sk#1) AND isnotnull(ss_ticket_number#8)) AN Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#14, sr_ticket_number#15, sr_returned_date_sk#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -233,7 +233,7 @@ Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_add (11) ColumnarToRow [codegen id : 1] Input [11]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] -(12) Scan parquet spark_catalog.default.catalog_sales +(12) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19, cs_sold_date_sk#20] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -256,7 +256,7 @@ Arguments: hashpartitioning(cs_item_sk#17, cs_order_number#18, 5), ENSURE_REQUIR Input [3]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19] Arguments: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19], [cs_item_sk#17 ASC NULLS FIRST, cs_order_number#18 ASC NULLS FIRST] -(17) Scan parquet spark_catalog.default.catalog_returns +(17) CometScan parquet spark_catalog.default.catalog_returns Output [6]: [cr_item_sk#21, cr_order_number#22, cr_refunded_cash#23, cr_reversed_charge#24, cr_store_credit#25, cr_returned_date_sk#26] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -344,7 +344,7 @@ Join condition: None Output [11]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38] Input [13]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, 
ss_sold_date_sk#12, d_date_sk#37, d_year#38] -(36) Scan parquet spark_catalog.default.store +(36) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#39, s_store_name#40, s_zip#41] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -372,7 +372,7 @@ Join condition: None Output [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41] Input [14]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_sk#39, s_store_name#40, s_zip#41] -(42) Scan parquet spark_catalog.default.customer +(42) CometScan parquet spark_catalog.default.customer Output [6]: [c_customer_sk#42, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, c_first_sales_date_sk#47] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -400,7 +400,7 @@ Join condition: None Output [16]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, c_first_sales_date_sk#47] Input [18]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_customer_sk#42, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, c_first_sales_date_sk#47] -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#48, d_year#49] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -441,7 +441,7 @@ Join condition: None Output [16]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51] Input [18]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, d_year#49, d_date_sk#50, d_year#51] -(57) Scan parquet spark_catalog.default.customer_demographics +(57) CometScan parquet spark_catalog.default.customer_demographics Output [2]: [cd_demo_sk#52, cd_marital_status#53] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -482,7 +482,7 @@ Join condition: NOT (cd_marital_status#53 = cd_marital_status#55) Output [14]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51] Input [18]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51, cd_marital_status#53, cd_demo_sk#54, cd_marital_status#55] -(66) Scan parquet 
spark_catalog.default.promotion +(66) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#56] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -510,7 +510,7 @@ Join condition: None Output [13]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51] Input [15]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51, p_promo_sk#56] -(72) Scan parquet spark_catalog.default.household_demographics +(72) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#57, hd_income_band_sk#58] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -551,7 +551,7 @@ Join condition: None Output [13]: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_addr_sk#45, d_year#49, d_year#51, hd_income_band_sk#58, hd_income_band_sk#60] Input [15]: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51, hd_income_band_sk#58, hd_demo_sk#59, hd_income_band_sk#60] -(81) Scan parquet spark_catalog.default.customer_address +(81) CometScan parquet spark_catalog.default.customer_address Output [5]: [ca_address_sk#61, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -592,7 +592,7 @@ Join condition: None Output [19]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, d_year#49, d_year#51, hd_income_band_sk#58, hd_income_band_sk#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70] Input [21]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_addr_sk#45, d_year#49, d_year#51, hd_income_band_sk#58, hd_income_band_sk#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_address_sk#66, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70] -(90) Scan parquet spark_catalog.default.income_band +(90) CometScan parquet spark_catalog.default.income_band Output [1]: [ib_income_band_sk#71] Batched: true Location [not included in comparison]/{warehouse_dir}/income_band] @@ -633,7 +633,7 @@ Join condition: None Output [17]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, d_year#49, d_year#51, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70] Input [19]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, d_year#49, d_year#51, hd_income_band_sk#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70, ib_income_band_sk#72] -(99) Scan parquet spark_catalog.default.item +(99) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#73, i_current_price#74, i_color#75, i_product_name#76] Batched: 
true Location [not included in comparison]/{warehouse_dir}/item] @@ -687,7 +687,7 @@ Arguments: hashpartitioning(item_sk#90, store_name#91, store_zip#92, 5), ENSURE_ Input [17]: [product_name#89, item_sk#90, store_name#91, store_zip#92, b_street_number#93, b_streen_name#94, b_city#95, b_zip#96, c_street_number#97, c_street_name#98, c_city#99, c_zip#100, syear#101, cnt#102, s1#103, s2#104, s3#105] Arguments: [item_sk#90 ASC NULLS FIRST, store_name#91 ASC NULLS FIRST, store_zip#92 ASC NULLS FIRST], false, 0 -(110) Scan parquet spark_catalog.default.store_sales +(110) CometScan parquet spark_catalog.default.store_sales Output [12]: [ss_item_sk#106, ss_customer_sk#107, ss_cdemo_sk#108, ss_hdemo_sk#109, ss_addr_sk#110, ss_store_sk#111, ss_promo_sk#112, ss_ticket_number#113, ss_wholesale_cost#114, ss_list_price#115, ss_coupon_amt#116, ss_sold_date_sk#117] Batched: true Location: InMemoryFileIndex [] @@ -703,7 +703,7 @@ Condition : (((((((isnotnull(ss_item_sk#106) AND isnotnull(ss_ticket_number#113) Input [12]: [ss_item_sk#106, ss_customer_sk#107, ss_cdemo_sk#108, ss_hdemo_sk#109, ss_addr_sk#110, ss_store_sk#111, ss_promo_sk#112, ss_ticket_number#113, ss_wholesale_cost#114, ss_list_price#115, ss_coupon_amt#116, ss_sold_date_sk#117] Arguments: [ss_item_sk#106, ss_customer_sk#107, ss_cdemo_sk#108, ss_hdemo_sk#109, ss_addr_sk#110, ss_store_sk#111, ss_promo_sk#112, ss_ticket_number#113, ss_wholesale_cost#114, ss_list_price#115, ss_coupon_amt#116, ss_sold_date_sk#117] -(113) Scan parquet spark_catalog.default.store_returns +(113) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#119, sr_ticket_number#120, sr_returned_date_sk#121] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -1017,7 +1017,7 @@ BroadcastExchange (185) +- CometScan parquet spark_catalog.default.date_dim (182) -(182) Scan parquet spark_catalog.default.date_dim +(182) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#37, d_year#38] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -1042,7 +1042,7 @@ BroadcastExchange (189) +- CometScan parquet spark_catalog.default.date_dim (186) -(186) Scan parquet spark_catalog.default.date_dim +(186) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#132, d_year#133] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt index 25e832248..49a984b30 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt @@ -72,7 +72,7 @@ TakeOrderedAndProject (71) +- ReusedExchange (58) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_quantity#3, ss_sales_price#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -102,7 +102,7 @@ Arguments: 
[ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] (9) ColumnarToRow [codegen id : 5] Input [2]: [ss_store_sk#1, ss_net_profit#2] -(10) Scan parquet spark_catalog.default.store +(10) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#7, s_county#8, s_state#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -116,7 +116,7 @@ Condition : isnotnull(s_store_sk#7) (12) ColumnarToRow [codegen id : 4] Input [3]: [s_store_sk#7, s_county#8, s_state#9] -(13) Scan parquet spark_catalog.default.store_sales +(13) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -128,7 +128,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Condition : isnotnull(ss_store_sk#10) -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#14, s_state#15] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -353,7 +353,7 @@ BroadcastExchange (63) +- CometScan parquet spark_catalog.default.date_dim (59) -(59) Scan parquet spark_catalog.default.date_dim +(59) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt index 8ac4aab45..45f7e2e66 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt @@ -65,7 +65,7 @@ +- CometScan parquet spark_catalog.default.catalog_returns (53) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [8]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, cs_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -77,7 +77,7 @@ ReadSchema: struct date_add(d_d Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#28, d_date#29] Arguments: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] -(46) Scan parquet spark_catalog.default.promotion +(46) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#30] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -314,7 +314,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Arguments: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST] -(53) Scan parquet spark_catalog.default.catalog_returns +(53) CometScan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#31, cr_order_number#32, cr_returned_date_sk#33] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -377,7 +377,7 @@ 
BroadcastExchange (69) +- CometScan parquet spark_catalog.default.date_dim (65) -(65) Scan parquet spark_catalog.default.date_dim +(65) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt index 0ae3bf650..17b7fb0b8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt @@ -72,7 +72,7 @@ TakeOrderedAndProject (71) +- ReusedExchange (61) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -83,7 +83,7 @@ ReadSchema: struct 0.00)) -(18) Scan parquet spark_catalog.default.customer +(18) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#16, c_customer_id#17, c_first_name#18, c_last_name#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -168,7 +168,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -77,7 +77,7 @@ Arguments: [ws_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5] Arguments: [ws_item_sk#1, ws_net_paid#2], [ws_item_sk#1, ws_net_paid#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#7, i_class#8, i_category#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -228,7 +228,7 @@ BroadcastExchange (41) +- CometScan parquet spark_catalog.default.date_dim (37) -(37) Scan parquet spark_catalog.default.date_dim +(37) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt index 536a8a12d..61ef88e69 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt @@ -25,7 +25,7 @@ +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, 
i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -61,7 +61,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -147,7 +147,7 @@ BroadcastExchange (29) +- CometScan parquet spark_catalog.default.date_dim (25) -(25) Scan parquet spark_catalog.default.date_dim +(25) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt index fc449ff52..868b2f482 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt @@ -40,7 +40,7 @@ +- CometScan parquet spark_catalog.default.customer_demographics (30) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -51,14 +51,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -95,7 +95,7 @@ Left output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Right output [1]: [ss_customer_sk#4] Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight -(12) Scan parquet spark_catalog.default.web_sales +(12) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -114,7 +114,7 @@ Arguments: [ws_sold_date_sk#11], [d_date_sk#13], Inner, BuildRight Input [3]: [ws_bill_customer_sk#10, ws_sold_date_sk#11, d_date_sk#13] Arguments: [customer_sk#14], [ws_bill_customer_sk#10 AS customer_sk#14] -(16) Scan parquet spark_catalog.default.catalog_sales +(16) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#15, cs_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -150,7 +150,7 @@ Arguments: [c_customer_sk#1], [customer_sk#14], LeftSemi, BuildRight Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Arguments: [c_current_cdemo_sk#2, c_current_addr_sk#3], [c_current_cdemo_sk#2, c_current_addr_sk#3] -(24) Scan parquet spark_catalog.default.customer_address +(24) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_county#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -178,7 +178,7 @@ Arguments: [c_current_addr_sk#3], 
[ca_address_sk#20], Inner, BuildRight Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#20] Arguments: [c_current_cdemo_sk#2], [c_current_cdemo_sk#2] -(30) Scan parquet spark_catalog.default.customer_demographics +(30) CometScan parquet spark_catalog.default.customer_demographics Output [9]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_education_status#25, cd_purchase_estimate#26, cd_credit_rating#27, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -233,7 +233,7 @@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(40) Scan parquet spark_catalog.default.date_dim +(40) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_moy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt index 52d523ca7..689697306 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt @@ -69,7 +69,7 @@ +- ReusedExchange (58) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -80,7 +80,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#19, c_customer_id#20, c_first_name#21, c_last_name#22, c_preferred_cust_flag#23, c_birth_country#24, c_login#25, c_email_address#26] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -158,7 +158,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [ws_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ws_ext_sales_price#2, ws_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt index 2a92752c3..31a11bba9 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt @@ -86,7 +86,7 @@ +- CometScan parquet spark_catalog.default.date_dim (72) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -109,7 +109,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -132,7 +132,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -168,7 +168,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -244,7 +244,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: [brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -311,7 +311,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -343,7 +343,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, 
i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -389,7 +389,7 @@ Functions [2]: [sum((cast(ss_quantity#2 as decimal(10,0)) * ss_list_price#3)), c Input [6]: [channel#50, i_brand_id#40, i_class_id#41, i_category_id#42, sales#51, number_sales#52] Condition : (isnotnull(sales#51) AND (cast(sales#51 as decimal(32,6)) > cast(Subquery scalar-subquery#53, [id=#54] as decimal(32,6)))) -(65) Scan parquet spark_catalog.default.store_sales +(65) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -421,7 +421,7 @@ Arguments: [ss_item_sk#55], [i_item_sk#60], Inner, BuildRight Input [8]: [ss_item_sk#55, ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_item_sk#60, i_brand_id#61, i_class_id#62, i_category_id#63] Arguments: [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#61, i_class_id#62, i_category_id#63], [ss_quantity#56, ss_list_price#57, ss_sold_date_sk#58, i_brand_id#61, i_class_id#62, i_category_id#63] -(72) Scan parquet spark_catalog.default.date_dim +(72) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#64, d_week_seq#65] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -505,7 +505,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (95) -(86) Scan parquet spark_catalog.default.store_sales +(86) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#74, ss_list_price#75, ss_sold_date_sk#76] Batched: true Location: InMemoryFileIndex [] @@ -524,7 +524,7 @@ Arguments: [ss_sold_date_sk#76], [d_date_sk#78], Inner, BuildRight Input [4]: [ss_quantity#74, ss_list_price#75, ss_sold_date_sk#76, d_date_sk#78] Arguments: [quantity#79, list_price#80], [ss_quantity#74 AS quantity#79, ss_list_price#75 AS list_price#80] -(90) Scan parquet spark_catalog.default.catalog_sales +(90) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#81, cs_list_price#82, cs_sold_date_sk#83] Batched: true Location: InMemoryFileIndex [] @@ -543,7 +543,7 @@ Arguments: [cs_sold_date_sk#83], [d_date_sk#85], Inner, BuildRight Input [4]: [cs_quantity#81, cs_list_price#82, cs_sold_date_sk#83, d_date_sk#85] Arguments: [quantity#86, list_price#87], [cs_quantity#81 AS quantity#86, cs_list_price#82 AS list_price#87] -(94) Scan parquet spark_catalog.default.web_sales +(94) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#88, ws_list_price#89, ws_sold_date_sk#90] Batched: true Location: InMemoryFileIndex [] @@ -598,7 +598,7 @@ BroadcastExchange (107) +- CometScan parquet spark_catalog.default.date_dim (103) -(103) Scan parquet spark_catalog.default.date_dim +(103) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_week_seq#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -627,7 +627,7 @@ Subquery:6 Hosting operator id = 104 Hosting Expression = Subquery scalar-subque +- CometScan parquet spark_catalog.default.date_dim (108) -(108) Scan parquet spark_catalog.default.date_dim +(108) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#98, d_year#99, d_moy#100, d_dom#101] Batched: true Location [not included in 
comparison]/{warehouse_dir}/date_dim] @@ -653,7 +653,7 @@ BroadcastExchange (116) +- CometScan parquet spark_catalog.default.date_dim (112) -(112) Scan parquet spark_catalog.default.date_dim +(112) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#102] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -691,7 +691,7 @@ BroadcastExchange (121) +- CometScan parquet spark_catalog.default.date_dim (117) -(117) Scan parquet spark_catalog.default.date_dim +(117) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#64, d_week_seq#65] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -720,7 +720,7 @@ Subquery:13 Hosting operator id = 118 Hosting Expression = Subquery scalar-subqu +- CometScan parquet spark_catalog.default.date_dim (122) -(122) Scan parquet spark_catalog.default.date_dim +(122) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#103, d_year#104, d_moy#105, d_dom#106] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt index 04ffdbb5f..4bc928e63 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt @@ -123,7 +123,7 @@ +- ReusedExchange (112) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ ReadSchema: struct Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -146,7 +146,7 @@ ReadSchema: struct Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] Condition : isnotnull(ss_item_sk#10) -(7) Scan parquet spark_catalog.default.item +(7) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -169,7 +169,7 @@ ReadSchema: struct Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] Condition : isnotnull(cs_item_sk#17) -(11) Scan parquet spark_catalog.default.item +(11) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -205,7 +205,7 @@ Arguments: [cs_item_sk#17], [i_item_sk#20], Inner, BuildRight Input [6]: [cs_item_sk#17, cs_sold_date_sk#18, i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Arguments: [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23], [cs_sold_date_sk#18, i_brand_id#21, i_class_id#22, i_category_id#23] -(16) Scan parquet spark_catalog.default.date_dim +(16) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -281,7 +281,7 @@ Input [3]: [brand_id#27, class_id#28, category_id#29] Keys [3]: 
[brand_id#27, class_id#28, category_id#29] Functions: [] -(33) Scan parquet spark_catalog.default.web_sales +(33) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#30, ws_sold_date_sk#31] Batched: true Location: InMemoryFileIndex [] @@ -348,7 +348,7 @@ Left output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk# Right output [1]: [ss_item_sk#38] Arguments: [ss_item_sk#1], [ss_item_sk#38], LeftSemi, BuildRight -(48) Scan parquet spark_catalog.default.item +(48) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -380,7 +380,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#39], Inner, BuildRight Input [8]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_item_sk#39, i_brand_id#40, i_class_id#41, i_category_id#42] Arguments: [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42], [ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4, i_brand_id#40, i_class_id#41, i_category_id#42] -(55) Scan parquet spark_catalog.default.date_dim +(55) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -426,7 +426,7 @@ Functions [2]: [sum((cast(ss_quantity#2 as decimal(10,0)) * ss_list_price#3)), c Input [6]: [channel#49, i_brand_id#40, i_class_id#41, i_category_id#42, sales#50, number_sales#51] Condition : (isnotnull(sales#50) AND (cast(sales#50 as decimal(32,6)) > cast(Subquery scalar-subquery#52, [id=#53] as decimal(32,6)))) -(65) Scan parquet spark_catalog.default.catalog_sales +(65) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#54, cs_quantity#55, cs_list_price#56, cs_sold_date_sk#57] Batched: true Location: InMemoryFileIndex [] @@ -488,7 +488,7 @@ Functions [2]: [sum((cast(cs_quantity#55 as decimal(10,0)) * cs_list_price#56)), Input [6]: [channel#67, i_brand_id#60, i_class_id#61, i_category_id#62, sales#68, number_sales#69] Condition : (isnotnull(sales#68) AND (cast(sales#68 as decimal(32,6)) > cast(ReusedSubquery Subquery scalar-subquery#52, [id=#53] as decimal(32,6)))) -(79) Scan parquet spark_catalog.default.web_sales +(79) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#70, ws_quantity#71, ws_list_price#72, ws_sold_date_sk#73] Batched: true Location: InMemoryFileIndex [] @@ -710,7 +710,7 @@ Subquery:1 Hosting operator id = 64 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (135) -(123) Scan parquet spark_catalog.default.store_sales +(123) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#121, ss_list_price#122, ss_sold_date_sk#123] Batched: true Location: InMemoryFileIndex [] @@ -729,14 +729,14 @@ Arguments: [ss_sold_date_sk#123], [d_date_sk#125], Inner, BuildRight Input [4]: [ss_quantity#121, ss_list_price#122, ss_sold_date_sk#123, d_date_sk#125] Arguments: [quantity#126, list_price#127], [ss_quantity#121 AS quantity#126, ss_list_price#122 AS list_price#127] -(127) Scan parquet spark_catalog.default.catalog_sales +(127) CometScan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#128, cs_list_price#129, cs_sold_date_sk#130] Batched: true Location: InMemoryFileIndex [] PartitionFilters: [isnotnull(cs_sold_date_sk#130), dynamicpruningexpression(cs_sold_date_sk#130 IN dynamicpruning#131)] ReadSchema: struct -(128) Scan 
parquet spark_catalog.default.date_dim +(128) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#132, d_year#133] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -764,7 +764,7 @@ Arguments: [cs_sold_date_sk#130], [d_date_sk#132], Inner, BuildRight Input [4]: [cs_quantity#128, cs_list_price#129, cs_sold_date_sk#130, d_date_sk#132] Arguments: [quantity#134, list_price#135], [cs_quantity#128 AS quantity#134, cs_list_price#129 AS list_price#135] -(134) Scan parquet spark_catalog.default.web_sales +(134) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#136, ws_list_price#137, ws_sold_date_sk#138] Batched: true Location: InMemoryFileIndex [] @@ -815,7 +815,7 @@ BroadcastExchange (147) +- CometScan parquet spark_catalog.default.date_dim (143) -(143) Scan parquet spark_catalog.default.date_dim +(143) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#132, d_year#133] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -847,7 +847,7 @@ BroadcastExchange (152) +- CometScan parquet spark_catalog.default.date_dim (148) -(148) Scan parquet spark_catalog.default.date_dim +(148) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#43, d_year#44, d_moy#45] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -877,7 +877,7 @@ BroadcastExchange (157) +- CometScan parquet spark_catalog.default.date_dim (153) -(153) Scan parquet spark_catalog.default.date_dim +(153) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#26, d_year#146] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt index 9990a2dfc..5c25745ba 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt @@ -144,7 +144,7 @@ +- CometScan parquet spark_catalog.default.item (133) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -156,7 +156,7 @@ ReadSchema: struct Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Condition : isnotnull(cs_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ Arguments: [cs_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [cs_ext_sales_price#2, cs_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true 
Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -134,7 +134,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(23) Scan parquet spark_catalog.default.date_dim +(23) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt index 2810779ed..34d510fc7 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt @@ -25,7 +25,7 @@ TakeOrderedAndProject (24) +- CometScan parquet spark_catalog.default.warehouse (15) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct Input [3]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3] Condition : isnotnull(inv_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -65,7 +65,7 @@ Arguments: [inv_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3, d_date_sk#5] Arguments: [inv_item_sk#1, inv_quantity_on_hand#2], [inv_item_sk#1, inv_quantity_on_hand#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [5]: [i_item_sk#7, i_brand#8, i_class#9, i_category#10, i_product_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -92,7 +92,7 @@ Arguments: [inv_quantity_on_hand#2, i_brand#8, i_class#9, i_category#10, i_produ (14) ColumnarToRow [codegen id : 2] Input [5]: [inv_quantity_on_hand#2, i_brand#8, i_class#9, i_category#10, i_product_name#11] -(15) Scan parquet spark_catalog.default.warehouse +(15) CometScan parquet spark_catalog.default.warehouse Output: [] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -149,7 +149,7 @@ BroadcastExchange (29) +- CometScan parquet spark_catalog.default.date_dim (25) -(25) Scan parquet spark_catalog.default.date_dim +(25) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt index 852eedea6..b284c9b27 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt @@ -47,7 +47,7 @@ +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.inventory +(1) CometScan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -59,7 +59,7 @@ ReadSchema: struct= 15) AND (cnt#17 <= 20)) -(25) Scan parquet spark_catalog.default.customer +(25) CometScan parquet spark_catalog.default.customer Output [5]: 
[c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -192,7 +192,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(33) Scan parquet spark_catalog.default.date_dim +(33) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_dom#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt index 790000085..5933ea5fa 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt @@ -45,7 +45,7 @@ TakeOrderedAndProject (44) +- CometScan parquet spark_catalog.default.customer_demographics (35) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -56,14 +56,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -103,7 +103,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], LeftSemi, BuildRight (12) ColumnarToRow [codegen id : 5] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(13) Scan parquet spark_catalog.default.web_sales +(13) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#12, ws_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -135,7 +135,7 @@ Right keys [1]: [ws_bill_customer_sk#12] Join type: ExistenceJoin(exists#2) Join condition: None -(20) Scan parquet spark_catalog.default.catalog_sales +(20) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -175,7 +175,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(29) Scan parquet spark_catalog.default.customer_address +(29) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -203,7 +203,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#21] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#20, ca_state#21] -(35) Scan parquet spark_catalog.default.customer_demographics +(35) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(45) Scan parquet spark_catalog.default.date_dim +(45) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_qoy#11] Batched: true Location [not included in 
comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt index 9f321ebda..c5bb66987 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt @@ -39,7 +39,7 @@ +- CometScan parquet spark_catalog.default.customer_demographics (29) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -50,14 +50,14 @@ ReadSchema: struct -(4) Scan parquet spark_catalog.default.date_dim +(4) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_qoy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -94,7 +94,7 @@ Left output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Right output [1]: [ss_customer_sk#4] Arguments: [c_customer_sk#1], [ss_customer_sk#4], LeftSemi, BuildRight -(12) Scan parquet spark_catalog.default.web_sales +(12) CometScan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -113,7 +113,7 @@ Arguments: [ws_sold_date_sk#11], [d_date_sk#13], Inner, BuildRight Input [3]: [ws_bill_customer_sk#10, ws_sold_date_sk#11, d_date_sk#13] Arguments: [customsk#14], [ws_bill_customer_sk#10 AS customsk#14] -(16) Scan parquet spark_catalog.default.catalog_sales +(16) CometScan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#15, cs_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -149,7 +149,7 @@ Arguments: [c_customer_sk#1], [customsk#14], LeftSemi, BuildRight Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Arguments: [c_current_cdemo_sk#2, c_current_addr_sk#3], [c_current_cdemo_sk#2, c_current_addr_sk#3] -(24) Scan parquet spark_catalog.default.customer_address +(24) CometScan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#20, ca_state#21] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -173,7 +173,7 @@ Arguments: [c_current_addr_sk#3], [ca_address_sk#20], Inner, BuildRight Input [4]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#20, ca_state#21] Arguments: [c_current_cdemo_sk#2, ca_state#21], [c_current_cdemo_sk#2, ca_state#21] -(29) Scan parquet spark_catalog.default.customer_demographics +(29) CometScan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_dep_count#25, cd_dep_employed_count#26, cd_dep_college_count#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -228,7 +228,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(39) Scan parquet spark_catalog.default.date_dim +(39) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_qoy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt index 92441bde0..e58a49d58 
100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- ReusedExchange (28) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -55,7 +55,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.store_sales +(3) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: [i_item_sk#1], [ss_item_sk#4], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.store +(13) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt index 7d9198ed5..685f048de 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt @@ -78,7 +78,7 @@ TakeOrderedAndProject (77) +- ReusedExchange (59) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -98,7 +98,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] -(5) Scan parquet spark_catalog.default.web_returns +(5) CometScan parquet 
spark_catalog.default.web_returns Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -122,7 +122,7 @@ Arguments: [ws_order_number#2, ws_item_sk#1], [wr_order_number#9, wr_item_sk#8], Input [9]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11] Arguments: [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11], [ws_item_sk#1, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6, wr_return_quantity#10, wr_return_amt#11] -(10) Scan parquet spark_catalog.default.date_dim +(10) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -195,7 +195,7 @@ Condition : ((return_rank#25 <= 10) OR (currency_rank#26 <= 10)) Output [5]: [web AS channel#27, item#22, return_ratio#23, return_rank#25, currency_rank#26] Input [5]: [item#22, return_ratio#23, currency_ratio#24, return_rank#25, currency_rank#26] -(27) Scan parquet spark_catalog.default.catalog_sales +(27) CometScan parquet spark_catalog.default.catalog_sales Output [6]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_net_profit#32, cs_sold_date_sk#33] Batched: true Location: InMemoryFileIndex [] @@ -215,7 +215,7 @@ Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, c Input [5]: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] Arguments: [cs_item_sk#28, cs_order_number#29, cs_quantity#30, cs_net_paid#31, cs_sold_date_sk#33] -(31) Scan parquet spark_catalog.default.catalog_returns +(31) CometScan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#35, cr_order_number#36, cr_return_quantity#37, cr_return_amount#38, cr_returned_date_sk#39] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -296,7 +296,7 @@ Condition : ((return_rank#50 <= 10) OR (currency_rank#51 <= 10)) Output [5]: [catalog AS channel#52, item#47, return_ratio#48, return_rank#50, currency_rank#51] Input [5]: [item#47, return_ratio#48, currency_ratio#49, return_rank#50, currency_rank#51] -(50) Scan parquet spark_catalog.default.store_sales +(50) CometScan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_net_profit#57, ss_sold_date_sk#58] Batched: true Location: InMemoryFileIndex [] @@ -316,7 +316,7 @@ Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, Input [5]: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] Arguments: [ss_item_sk#53, ss_ticket_number#54, ss_quantity#55, ss_net_paid#56, ss_sold_date_sk#58] -(54) Scan parquet spark_catalog.default.store_returns +(54) CometScan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#60, sr_ticket_number#61, sr_return_quantity#62, sr_return_amt#63, sr_returned_date_sk#64] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -431,7 +431,7 @@ BroadcastExchange (82) +- CometScan parquet spark_catalog.default.date_dim (78) -(78) Scan parquet spark_catalog.default.date_dim +(78) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#13, d_year#14, d_moy#15] Batched: true Location [not 
included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt index 5d728a1c5..3211e46f6 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt @@ -73,7 +73,7 @@ TakeOrderedAndProject (72) +- ReusedExchange (62) -(1) Scan parquet spark_catalog.default.web_sales +(1) CometScan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -85,7 +85,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -204,7 +204,7 @@ Arguments: hashpartitioning(item_sk#9, d_date#6, 5), ENSURE_REQUIREMENTS, [plan_ Input [3]: [item_sk#9, d_date#6, cume_sales#22] Arguments: [item_sk#9 ASC NULLS FIRST, d_date#6 ASC NULLS FIRST], false, 0 -(30) Scan parquet spark_catalog.default.store_sales +(30) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#23, ss_sales_price#24, ss_sold_date_sk#25] Batched: true Location: InMemoryFileIndex [] @@ -406,7 +406,7 @@ BroadcastExchange (77) +- CometScan parquet spark_catalog.default.date_dim (73) -(73) Scan parquet spark_catalog.default.date_dim +(73) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_date#6, d_month_seq#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt index 97f571929..4b3a3c576 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt @@ -48,7 +48,7 @@ TakeOrderedAndProject (47) +- ReusedExchange (39) -(1) Scan parquet spark_catalog.default.item +(1) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#1, i_brand#2, i_category#3] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -59,7 +59,7 @@ ReadSchema: struct Input [3]: [i_item_sk#1, i_brand#2, i_category#3] Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(i_brand#2)) -(3) Scan parquet spark_catalog.default.catalog_sales +(3) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -84,7 +84,7 @@ Arguments: [i_item_sk#1], [cs_item_sk#5], Inner, BuildRight Input [7]: [i_item_sk#1, i_brand#2, i_category#3, cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in 
comparison]/{warehouse_dir}/date_dim] @@ -108,7 +108,7 @@ Arguments: [cs_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [8]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] Arguments: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11], [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11] -(13) Scan parquet spark_catalog.default.call_center +(13) CometScan parquet spark_catalog.default.call_center Output [2]: [cc_call_center_sk#12, cc_name#13] Batched: true Location [not included in comparison]/{warehouse_dir}/call_center] @@ -263,7 +263,7 @@ BroadcastExchange (51) +- CometScan parquet spark_catalog.default.date_dim (48) -(48) Scan parquet spark_catalog.default.date_dim +(48) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt index a826e7d78..564222abf 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt @@ -84,7 +84,7 @@ TakeOrderedAndProject (83) +- ReusedExchange (74) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -100,7 +100,7 @@ Condition : isnotnull(ss_store_sk#1) Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -120,7 +120,7 @@ Arguments: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, n Child 0 Input [6]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11] Child 1 Input [6]: [store_sk#16, date_sk#17, sales_price#18, profit#19, return_amt#20, net_loss#21] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#22, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -148,7 +148,7 @@ Arguments: [date_sk#7], [d_date_sk#22], Inner, BuildRight Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22] Arguments: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11], [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11] -(14) Scan parquet spark_catalog.default.store +(14) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#24, s_store_id#25] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -191,7 +191,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt# Aggregate Attributes [4]: 
[sum(UnscaledValue(sales_price#8))#30, sum(UnscaledValue(return_amt#10))#31, sum(UnscaledValue(profit#9))#32, sum(UnscaledValue(net_loss#11))#33] Results [5]: [store channel AS channel#34, concat(store, s_store_id#25) AS id#35, MakeDecimal(sum(UnscaledValue(sales_price#8))#30,17,2) AS sales#36, MakeDecimal(sum(UnscaledValue(return_amt#10))#31,17,2) AS returns#37, (MakeDecimal(sum(UnscaledValue(profit#9))#32,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#33,17,2)) AS profit#38] -(23) Scan parquet spark_catalog.default.catalog_sales +(23) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Batched: true Location: InMemoryFileIndex [] @@ -207,7 +207,7 @@ Condition : isnotnull(cs_catalog_page_sk#39) Input [4]: [cs_catalog_page_sk#39, cs_ext_sales_price#40, cs_net_profit#41, cs_sold_date_sk#42] Arguments: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49], [cs_catalog_page_sk#39 AS page_sk#44, cs_sold_date_sk#42 AS date_sk#45, cs_ext_sales_price#40 AS sales_price#46, cs_net_profit#41 AS profit#47, 0.00 AS return_amt#48, 0.00 AS net_loss#49] -(26) Scan parquet spark_catalog.default.catalog_returns +(26) CometScan parquet spark_catalog.default.catalog_returns Output [4]: [cr_catalog_page_sk#50, cr_return_amount#51, cr_net_loss#52, cr_returned_date_sk#53] Batched: true Location: InMemoryFileIndex [] @@ -239,7 +239,7 @@ Arguments: [date_sk#45], [d_date_sk#60], Inner, BuildRight Input [7]: [page_sk#44, date_sk#45, sales_price#46, profit#47, return_amt#48, net_loss#49, d_date_sk#60] Arguments: [page_sk#44, sales_price#46, profit#47, return_amt#48, net_loss#49], [page_sk#44, sales_price#46, profit#47, return_amt#48, net_loss#49] -(33) Scan parquet spark_catalog.default.catalog_page +(33) CometScan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#61, cp_catalog_page_id#62] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -282,7 +282,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#46)), sum(UnscaledValue(return_amt Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#46))#67, sum(UnscaledValue(return_amt#48))#68, sum(UnscaledValue(profit#47))#69, sum(UnscaledValue(net_loss#49))#70] Results [5]: [catalog channel AS channel#71, concat(catalog_page, cp_catalog_page_id#62) AS id#72, MakeDecimal(sum(UnscaledValue(sales_price#46))#67,17,2) AS sales#73, MakeDecimal(sum(UnscaledValue(return_amt#48))#68,17,2) AS returns#74, (MakeDecimal(sum(UnscaledValue(profit#47))#69,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#49))#70,17,2)) AS profit#75] -(42) Scan parquet spark_catalog.default.web_sales +(42) CometScan parquet spark_catalog.default.web_sales Output [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Batched: true Location: InMemoryFileIndex [] @@ -298,7 +298,7 @@ Condition : isnotnull(ws_web_site_sk#76) Input [4]: [ws_web_site_sk#76, ws_ext_sales_price#77, ws_net_profit#78, ws_sold_date_sk#79] Arguments: [wsr_web_site_sk#81, date_sk#82, sales_price#83, profit#84, return_amt#85, net_loss#86], [ws_web_site_sk#76 AS wsr_web_site_sk#81, ws_sold_date_sk#79 AS date_sk#82, ws_ext_sales_price#77 AS sales_price#83, ws_net_profit#78 AS profit#84, 0.00 AS return_amt#85, 0.00 AS net_loss#86] -(45) Scan parquet spark_catalog.default.web_returns +(45) CometScan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#87, wr_order_number#88, wr_return_amt#89, wr_net_loss#90, 
wr_returned_date_sk#91] Batched: true Location: InMemoryFileIndex [] @@ -309,7 +309,7 @@ ReadSchema: struct Input [2]: [ca_address_sk#1, ca_state#2] Condition : isnotnull(ca_address_sk#1) -(3) Scan parquet spark_catalog.default.customer +(3) CometScan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#3, c_current_addr_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -74,7 +74,7 @@ Arguments: [ca_address_sk#1], [c_current_addr_sk#4], Inner, BuildRight Input [4]: [ca_address_sk#1, ca_state#2, c_customer_sk#3, c_current_addr_sk#4] Arguments: [ca_state#2, c_customer_sk#3], [ca_state#2, c_customer_sk#3] -(8) Scan parquet spark_catalog.default.store_sales +(8) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -99,7 +99,7 @@ Arguments: [c_customer_sk#3], [ss_customer_sk#6], Inner, BuildRight Input [5]: [ca_state#2, c_customer_sk#3, ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Arguments: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7], [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7] -(13) Scan parquet spark_catalog.default.date_dim +(13) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -127,7 +127,7 @@ Arguments: [ss_sold_date_sk#7], [d_date_sk#9], Inner, BuildRight Input [4]: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7, d_date_sk#9] Arguments: [ca_state#2, ss_item_sk#5], [ca_state#2, ss_item_sk#5] -(19) Scan parquet spark_catalog.default.item +(19) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#13, i_current_price#14, i_category#15] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -138,7 +138,7 @@ ReadSchema: struct Input [3]: [i_item_sk#13, i_current_price#14, i_category#15] Condition : ((isnotnull(i_current_price#14) AND isnotnull(i_category#15)) AND isnotnull(i_item_sk#13)) -(21) Scan parquet spark_catalog.default.item +(21) CometScan parquet spark_catalog.default.item Output [2]: [i_current_price#16, i_category#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -228,7 +228,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(39) Scan parquet spark_catalog.default.date_dim +(39) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -260,7 +260,7 @@ Subquery:2 Hosting operator id = 40 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (44) -(44) Scan parquet spark_catalog.default.date_dim +(44) CometScan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#24, d_year#25, d_moy#26] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q64/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q64/explain.txt index 77a8f008c..4d8ac469c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q64/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q64/explain.txt @@ -174,7 +174,7 @@ +- ReusedExchange (162) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [12]: [ss_item_sk#1, 
ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -190,7 +190,7 @@ Condition : (((((((isnotnull(ss_item_sk#1) AND isnotnull(ss_ticket_number#8)) AN Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] -(4) Scan parquet spark_catalog.default.store_returns +(4) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#14, sr_ticket_number#15, sr_returned_date_sk#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -222,7 +222,7 @@ Arguments: hashpartitioning(ss_item_sk#1, 5), ENSURE_REQUIREMENTS, CometNativeSh Input [11]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12], [ss_item_sk#1 ASC NULLS FIRST] -(11) Scan parquet spark_catalog.default.catalog_sales +(11) CometScan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19, cs_sold_date_sk#20] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -245,7 +245,7 @@ Arguments: hashpartitioning(cs_item_sk#17, cs_order_number#18, 5), ENSURE_REQUIR Input [3]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19] Arguments: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19], [cs_item_sk#17 ASC NULLS FIRST, cs_order_number#18 ASC NULLS FIRST] -(16) Scan parquet spark_catalog.default.catalog_returns +(16) CometScan parquet spark_catalog.default.catalog_returns Output [6]: [cr_item_sk#21, cr_order_number#22, cr_refunded_cash#23, cr_reversed_charge#24, cr_store_credit#25, cr_returned_date_sk#26] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -312,7 +312,7 @@ Arguments: [ss_item_sk#1], [cs_item_sk#17], Inner Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12, cs_item_sk#17] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12], [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] -(31) Scan parquet spark_catalog.default.date_dim +(31) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#32, d_year#33] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -336,7 +336,7 @@ Arguments: [ss_sold_date_sk#12], [d_date_sk#32], Inner, BuildRight Input [13]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, 
ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12, d_date_sk#32, d_year#33] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33], [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33] -(36) Scan parquet spark_catalog.default.store +(36) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#34, s_store_name#35, s_zip#36] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -360,7 +360,7 @@ Arguments: [ss_store_sk#6], [s_store_sk#34], Inner, BuildRight Input [14]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_sk#34, s_store_name#35, s_zip#36] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36], [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36] -(41) Scan parquet spark_catalog.default.customer +(41) CometScan parquet spark_catalog.default.customer Output [6]: [c_customer_sk#37, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -384,7 +384,7 @@ Arguments: [ss_customer_sk#2], [c_customer_sk#37], Inner, BuildRight Input [18]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_customer_sk#37, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42] Arguments: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42], [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, c_first_sales_date_sk#42] -(46) Scan parquet spark_catalog.default.date_dim +(46) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#43, d_year#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -420,7 +420,7 @@ Arguments: [c_first_shipto_date_sk#41], [d_date_sk#45], Inner, BuildRight Input [18]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, c_first_shipto_date_sk#41, d_year#44, d_date_sk#45, d_year#46] Arguments: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, 
s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46], [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46] -(54) Scan parquet spark_catalog.default.customer_demographics +(54) CometScan parquet spark_catalog.default.customer_demographics Output [2]: [cd_demo_sk#47, cd_marital_status#48] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -456,7 +456,7 @@ Arguments: [c_current_cdemo_sk#38], [cd_demo_sk#49], Inner, NOT (cd_marital_stat Input [18]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_cdemo_sk#38, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46, cd_marital_status#48, cd_demo_sk#49, cd_marital_status#50] Arguments: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46], [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46] -(62) Scan parquet spark_catalog.default.promotion +(62) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#51] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -480,7 +480,7 @@ Arguments: [ss_promo_sk#7], [p_promo_sk#51], Inner, BuildRight Input [15]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46, p_promo_sk#51] Arguments: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46], [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46] -(67) Scan parquet spark_catalog.default.household_demographics +(67) CometScan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#52, hd_income_band_sk#53] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -516,7 +516,7 @@ Arguments: [c_current_hdemo_sk#39], [hd_demo_sk#54], Inner, BuildRight Input [15]: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_hdemo_sk#39, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_demo_sk#54, hd_income_band_sk#55] Arguments: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55], [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55] 
-(75) Scan parquet spark_catalog.default.customer_address +(75) CometScan parquet spark_catalog.default.customer_address Output [5]: [ca_address_sk#56, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -552,7 +552,7 @@ Arguments: [c_current_addr_sk#40], [ca_address_sk#61], Inner, BuildRight Input [21]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, c_current_addr_sk#40, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_address_sk#61, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] Arguments: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65], [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, hd_income_band_sk#53, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] -(83) Scan parquet spark_catalog.default.income_band +(83) CometScan parquet spark_catalog.default.income_band Output [1]: [ib_income_band_sk#66] Batched: true Location [not included in comparison]/{warehouse_dir}/income_band] @@ -588,7 +588,7 @@ Arguments: [hd_income_band_sk#55], [ib_income_band_sk#67], Inner, BuildRight Input [19]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, hd_income_band_sk#55, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ib_income_band_sk#67] Arguments: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65], [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#33, s_store_name#35, s_zip#36, d_year#44, d_year#46, ca_street_number#57, ca_street_name#58, ca_city#59, ca_zip#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] -(91) Scan parquet spark_catalog.default.item +(91) CometScan parquet spark_catalog.default.item Output [4]: [i_item_sk#68, i_current_price#69, i_color#70, i_product_name#71] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -634,7 +634,7 @@ Arguments: hashpartitioning(item_sk#77, store_name#78, store_zip#79, 5), ENSURE_ Input [17]: [product_name#76, item_sk#77, store_name#78, store_zip#79, b_street_number#80, b_streen_name#81, b_city#82, b_zip#83, c_street_number#84, c_street_name#85, c_city#86, c_zip#87, syear#88, cnt#89, s1#90, s2#91, s3#92] Arguments: [product_name#76, item_sk#77, store_name#78, store_zip#79, b_street_number#80, b_streen_name#81, b_city#82, b_zip#83, c_street_number#84, c_street_name#85, c_city#86, c_zip#87, syear#88, cnt#89, s1#90, s2#91, s3#92], [item_sk#77 ASC NULLS FIRST, store_name#78 ASC NULLS FIRST, store_zip#79 ASC NULLS FIRST] -(101) Scan parquet spark_catalog.default.store_sales +(101) CometScan parquet spark_catalog.default.store_sales Output [12]: 
[ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_ticket_number#100, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] Batched: true Location: InMemoryFileIndex [] @@ -650,7 +650,7 @@ Condition : (((((((isnotnull(ss_item_sk#93) AND isnotnull(ss_ticket_number#100)) Input [12]: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_ticket_number#100, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] Arguments: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_ticket_number#100, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] -(104) Scan parquet spark_catalog.default.store_returns +(104) CometScan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#106, sr_ticket_number#107, sr_returned_date_sk#108] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -711,7 +711,7 @@ Arguments: [ss_item_sk#93], [cs_item_sk#109], Inner Input [12]: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104, cs_item_sk#109] Arguments: [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104], [ss_item_sk#93, ss_customer_sk#94, ss_cdemo_sk#95, ss_hdemo_sk#96, ss_addr_sk#97, ss_store_sk#98, ss_promo_sk#99, ss_wholesale_cost#101, ss_list_price#102, ss_coupon_amt#103, ss_sold_date_sk#104] -(118) Scan parquet spark_catalog.default.date_dim +(118) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#117, d_year#118] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -950,7 +950,7 @@ BroadcastExchange (177) +- CometScan parquet spark_catalog.default.date_dim (174) -(174) Scan parquet spark_catalog.default.date_dim +(174) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#32, d_year#33] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -975,7 +975,7 @@ BroadcastExchange (181) +- CometScan parquet spark_catalog.default.date_dim (178) -(178) Scan parquet spark_catalog.default.date_dim +(178) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#117, d_year#118] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q67a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q67a/explain.txt index 4044e13af..379144482 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q67a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q67a/explain.txt @@ -69,7 +69,7 @@ TakeOrderedAndProject (68) +- ReusedExchange (57) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_quantity#3, ss_sales_price#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -81,7 +81,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Condition : isnotnull(ss_store_sk#1) -(3) Scan parquet 
spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -101,7 +101,7 @@ Arguments: [ss_store_sk#1, ss_net_profit#2], [ss_store_sk#1, ss_net_profit#2] (9) ColumnarToRow [codegen id : 4] Input [2]: [ss_store_sk#1, ss_net_profit#2] -(10) Scan parquet spark_catalog.default.store +(10) CometScan parquet spark_catalog.default.store Output [3]: [s_store_sk#7, s_county#8, s_state#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -115,7 +115,7 @@ Condition : isnotnull(s_store_sk#7) (12) ColumnarToRow [codegen id : 3] Input [3]: [s_store_sk#7, s_county#8, s_state#9] -(13) Scan parquet spark_catalog.default.store_sales +(13) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -127,7 +127,7 @@ ReadSchema: struct Input [3]: [ss_store_sk#10, ss_net_profit#11, ss_sold_date_sk#12] Condition : isnotnull(ss_store_sk#10) -(15) Scan parquet spark_catalog.default.store +(15) CometScan parquet spark_catalog.default.store Output [2]: [s_store_sk#14, s_state#15] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -344,7 +344,7 @@ BroadcastExchange (62) +- CometScan parquet spark_catalog.default.date_dim (58) -(58) Scan parquet spark_catalog.default.date_dim +(58) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt index 8ac4aab45..45f7e2e66 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q72/explain.txt @@ -65,7 +65,7 @@ +- CometScan parquet spark_catalog.default.catalog_returns (53) -(1) Scan parquet spark_catalog.default.catalog_sales +(1) CometScan parquet spark_catalog.default.catalog_sales Output [8]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, cs_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -77,7 +77,7 @@ ReadSchema: struct date_add(d_d Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#28, d_date#29] Arguments: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] -(46) Scan parquet spark_catalog.default.promotion +(46) CometScan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#30] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -314,7 +314,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Arguments: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24], [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST] -(53) Scan parquet spark_catalog.default.catalog_returns +(53) CometScan parquet 
spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#31, cr_order_number#32, cr_returned_date_sk#33] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -377,7 +377,7 @@ BroadcastExchange (69) +- CometScan parquet spark_catalog.default.date_dim (65) -(65) Scan parquet spark_catalog.default.date_dim +(65) CometScan parquet spark_catalog.default.date_dim Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt index 4072b2277..12d5134a0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt @@ -69,7 +69,7 @@ +- ReusedExchange (58) -(1) Scan parquet spark_catalog.default.customer +(1) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -80,7 +80,7 @@ ReadSchema: struct 0.00)) -(17) Scan parquet spark_catalog.default.customer +(17) CometScan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#14, c_customer_id#15, c_first_name#16, c_last_name#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -158,7 +158,7 @@ ReadSchema: struct Input [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3] Condition : isnotnull(ws_item_sk#1) -(3) Scan parquet spark_catalog.default.date_dim +(3) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -77,7 +77,7 @@ Arguments: [ws_sold_date_sk#3], [d_date_sk#5], Inner, BuildRight Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5] Arguments: [ws_item_sk#1, ws_net_paid#2], [ws_item_sk#1, ws_net_paid#2] -(9) Scan parquet spark_catalog.default.item +(9) CometScan parquet spark_catalog.default.item Output [3]: [i_item_sk#7, i_class#8, i_category#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -211,7 +211,7 @@ BroadcastExchange (41) +- CometScan parquet spark_catalog.default.date_dim (37) -(37) Scan parquet spark_catalog.default.date_dim +(37) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt index 9eba711c1..4d870a8b8 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt @@ -25,7 +25,7 @@ +- CometScan parquet spark_catalog.default.date_dim (8) -(1) Scan parquet spark_catalog.default.store_sales +(1) CometScan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -37,7 +37,7 @@ ReadSchema: struct Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Condition : isnotnull(ss_item_sk#1) -(3) Scan parquet spark_catalog.default.item +(3) CometScan 
parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -61,7 +61,7 @@ Arguments: [ss_item_sk#1], [i_item_sk#5], Inner, BuildRight Input [9]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Arguments: [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10], [ss_ext_sales_price#2, ss_sold_date_sk#3, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] -(8) Scan parquet spark_catalog.default.date_dim +(8) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -143,7 +143,7 @@ BroadcastExchange (29) +- CometScan parquet spark_catalog.default.date_dim (25) -(25) Scan parquet spark_catalog.default.date_dim +(25) CometScan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#12] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim]