Commit 06394a6

fix KSHC
pan3793 committed Feb 20, 2025
1 parent 81ea85d commit 06394a6
Showing 1 changed file with 19 additions and 17 deletions.
@@ -88,33 +88,35 @@ object HiveConnectorUtils extends Logging {
       sparkSession: SparkSession,
       file: AnyRef,
       filePath: Path,
-      isSplitable: Boolean,
-      maxSplitBytes: Long,
+      isSplitable: JBoolean,
+      maxSplitBytes: JLong,
       partitionValues: InternalRow): Seq[PartitionedFile] = {
 
-    if (SPARK_RUNTIME_VERSION >= "4.0") { // SPARK-42821
+    if (SPARK_RUNTIME_VERSION >= "4.0") { // SPARK-42821, SPARK-51185
       val fileStatusWithMetadataClz = DynClasses.builder()
         .impl("org.apache.spark.sql.execution.datasources.FileStatusWithMetadata")
-        .build()
+        .buildChecked()
       DynMethods
         .builder("splitFiles")
         .impl(
           "org.apache.spark.sql.execution.PartitionedFileUtil",
           fileStatusWithMetadataClz,
+          classOf[Path],
           classOf[Boolean],
           classOf[Long],
           classOf[InternalRow])
-        .build()
-        .invoke[Seq[PartitionedFile]](
+        .buildChecked()
+        .invokeChecked[Seq[PartitionedFile]](
           null,
           file,
-          isSplitable.asInstanceOf[JBoolean],
-          maxSplitBytes.asInstanceOf[JLong],
+          filePath,
+          isSplitable,
+          maxSplitBytes,
           partitionValues)
     } else if (SPARK_RUNTIME_VERSION >= "3.5") { // SPARK-43039
       val fileStatusWithMetadataClz = DynClasses.builder()
         .impl("org.apache.spark.sql.execution.datasources.FileStatusWithMetadata")
-        .build()
+        .buildChecked()
       DynMethods
         .builder("splitFiles")
         .impl(
@@ -124,13 +126,13 @@ object HiveConnectorUtils extends Logging {
           classOf[Boolean],
           classOf[Long],
           classOf[InternalRow])
-        .build()
-        .invoke[Seq[PartitionedFile]](
+        .buildChecked()
+        .invokeChecked[Seq[PartitionedFile]](
           null,
           sparkSession,
           file,
-          isSplitable.asInstanceOf[JBoolean],
-          maxSplitBytes.asInstanceOf[JLong],
+          isSplitable,
+          maxSplitBytes,
           partitionValues)
     } else if (SPARK_RUNTIME_VERSION >= "3.3") {
       DynMethods
@@ -143,14 +145,14 @@ object HiveConnectorUtils extends Logging {
           classOf[Boolean],
           classOf[Long],
           classOf[InternalRow])
-        .build()
-        .invoke[Seq[PartitionedFile]](
+        .buildChecked()
+        .invokeChecked[Seq[PartitionedFile]](
           null,
           sparkSession,
           file,
           filePath,
-          isSplitable.asInstanceOf[JBoolean],
-          maxSplitBytes.asInstanceOf[JLong],
+          isSplitable,
+          maxSplitBytes,
           partitionValues)
     } else {
       throw unsupportedSparkVersion()
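
Note: a minimal, hypothetical caller of the patched helper, shown only to illustrate the new JBoolean/JLong signature and the filePath argument used on the Spark 4.0 code path; the fileStatusWithMetadata value, the spark handle, and the 128 MiB split size are illustrative assumptions, not part of this commit.

    // Illustrative sketch: box the primitives up front to match the JBoolean/JLong
    // parameters, and pass filePath explicitly (forwarded on Spark 4.0 after SPARK-51185).
    val parts: Seq[PartitionedFile] = HiveConnectorUtils.splitFiles(
      sparkSession = spark,                      // not forwarded on the Spark 4.0 code path
      file = fileStatusWithMetadata,             // AnyRef; a FileStatusWithMetadata on Spark 3.5+/4.0
      filePath = fileStatusWithMetadata.getPath,
      isSplitable = java.lang.Boolean.TRUE,
      maxSplitBytes = java.lang.Long.valueOf(128L * 1024 * 1024),
      partitionValues = InternalRow.empty)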
