From bc9faa97f7eaeaef8a914b6c58e7459fec85647a Mon Sep 17 00:00:00 2001
From: Cheng Pan
Date: Thu, 20 Feb 2025 23:29:29 +0800
Subject: [PATCH 1/4] [SPARK-49489][SQL][HIVE] HMS client respects
 `hive.thrift.client.max.message.size`

---
 .../sql/hive/client/HiveClientImpl.scala      | 59 +++++++++++++++++--
 1 file changed, 55 insertions(+), 4 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 3e7e81d25d943..f5c400570a5e9 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.client
 
 import java.io.{OutputStream, PrintStream}
 import java.lang.{Iterable => JIterable}
+import java.lang.reflect.{Proxy => JdkProxy}
 import java.lang.reflect.InvocationTargetException
 import java.nio.charset.StandardCharsets.UTF_8
 import java.util.{HashMap => JHashMap, Locale, Map => JMap}
@@ -33,7 +34,7 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hive.common.StatsSetupConst
 import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.metastore.{IMetaStoreClient, TableType => HiveTableType}
+import org.apache.hadoop.hive.metastore.{HiveMetaStoreClient, IMetaStoreClient, RetryingMetaStoreClient, TableType => HiveTableType}
 import org.apache.hadoop.hive.metastore.api.{Database => HiveDatabase, Table => MetaStoreApiTable, _}
 import org.apache.hadoop.hive.ql.Driver
 import org.apache.hadoop.hive.ql.metadata.{Hive, HiveException, Partition => HivePartition, Table => HiveTable}
@@ -44,6 +45,7 @@ import org.apache.hadoop.hive.serde.serdeConstants
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
 import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
 import org.apache.hadoop.security.UserGroupInformation
+import org.apache.thrift.transport.TEndpointTransport
 
 import org.apache.spark.{SparkConf, SparkException, SparkThrowable}
 import org.apache.spark.deploy.SparkHadoopUtil.SOURCE_SPARK
@@ -65,7 +67,7 @@ import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
 import org.apache.spark.sql.hive.HiveExternalCatalog.DATASOURCE_SCHEMA
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
-import org.apache.spark.util.{CircularBuffer, Utils}
+import org.apache.spark.util.{CircularBuffer, SparkClassUtils, Utils}
 
 /**
  * A class that wraps the HiveClient and converts its responses to externally visible classes.
@@ -1407,13 +1409,62 @@ private[hive] object HiveClientImpl extends Logging {
       case _ => new HiveConf(conf, classOf[HiveConf])
     }
-    try {
+    val hive = try {
       Hive.getWithoutRegisterFns(hiveConf)
     } catch {
       // SPARK-37069: not all Hive versions have the above method (e.g., Hive 2.3.9 has it but
-      // 2.3.8 don't), therefore here we fallback when encountering the exception.
+      // 2.3.8 doesn't), therefore we fall back when encountering the exception.
       case _: NoSuchMethodError => Hive.get(hiveConf)
     }
+    configureMaxThriftMessageSize(hiveConf, hive.getMSC)
+    hive
+  }
+
+  // SPARK-49489: a surgical fix for Hive 2.3.10 due to the lack of HIVE-26633
+  private def configureMaxThriftMessageSize(
+      hiveConf: HiveConf, msClient: IMetaStoreClient): Unit = try {
+    msClient match {
+      // Hive uses Java Dynamic Proxy to enhance the MetaStoreClient to support synchronization
+      // and retrying; we should unwrap and access the real MetaStoreClient instance first
+      case proxy if JdkProxy.isProxyClass(proxy.getClass) =>
+        JdkProxy.getInvocationHandler(proxy) match {
+          case syncHandler if syncHandler.getClass.getName.endsWith("SynchronizedHandler") =>
+            val realMscField = SparkClassUtils.classForName(
+              "org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler")
+              .getDeclaredField("client")
+            realMscField.setAccessible(true)
+            val realMsc = realMscField.get(syncHandler).asInstanceOf[IMetaStoreClient]
+            configureMaxThriftMessageSize(hiveConf, realMsc)
+          case retryHandler: RetryingMetaStoreClient =>
+            val realMscField = classOf[RetryingMetaStoreClient].getDeclaredField("base")
+            realMscField.setAccessible(true)
+            val realMsc = realMscField.get(retryHandler).asInstanceOf[IMetaStoreClient]
+            configureMaxThriftMessageSize(hiveConf, realMsc)
+          case _ =>
+        }
+
+      case msc: HiveMetaStoreClient if !msc.isLocalMetaStore =>
+        msc.getTTransport match {
+          case t: TEndpointTransport =>
+            // The configuration is added in HIVE-26633 (4.0.0)
+            val maxThriftMessageSize = HiveConf.toSizeBytes(
+              hiveConf.get("hive.thrift.client.max.message.size", "1gb")).toInt
+            if (t.getConfiguration.getMaxMessageSize != maxThriftMessageSize) {
+              t.getConfiguration.setMaxMessageSize(maxThriftMessageSize)
+              val resetConsumedMessageSizeMethod = classOf[TEndpointTransport]
+                .getDeclaredMethod("resetConsumedMessageSize", classOf[Long])
+              resetConsumedMessageSizeMethod.setAccessible(true)
+              resetConsumedMessageSizeMethod.invoke(t, Long.box(-1L))
+            }
+          case _ =>
+        }
+      case _ => // do nothing
+    }
+  } catch {
+    // TEndpointTransport is added in THRIFT-5237 (0.14.0); Hive versions that use an older
+    // Thrift library (e.g. Hive 2.3.9 uses Thrift 0.9.3) aren't affected by THRIFT-5237
+    // and don't need to apply HIVE-26633
+    case _: NoClassDefFoundError => // do nothing
+  }
 }

From 15390839e935b68b0d3056058848dfc6834ac014 Mon Sep 17 00:00:00 2001
From: Cheng Pan
Date: Fri, 21 Feb 2025 14:28:00 +0800
Subject: [PATCH 2/4] address comments and fix ut

---
 ...HiveMetastoreLazyInitializationSuite.scala |  1 +
 .../spark/sql/hive/HiveExternalCatalog.scala  | 21 ++++---
 .../sql/hive/client/HiveClientImpl.scala      | 61 +++++++++++--------
 3 files changed, 50 insertions(+), 33 deletions(-)

diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreLazyInitializationSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreLazyInitializationSuite.scala
index cb85993e5e099..da44aa642dae5 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreLazyInitializationSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreLazyInitializationSuite.scala
@@ -34,6 +34,7 @@ class HiveMetastoreLazyInitializationSuite extends SparkFunSuite {
       .master("local[2]")
       .enableHiveSupport()
       .config("spark.hadoop.hive.metastore.uris", "thrift://127.0.0.1:11111")
+      .config("spark.hadoop.hive.thrift.client.max.message.size", "1gb")
       .getOrCreate()
     val originalLevel = LogManager.getRootLogger.asInstanceOf[Logger].getLevel
     val originalClassLoader = Thread.currentThread().getContextClassLoader
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index ca8b5369a9cbb..72fea06936a10 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -22,6 +22,7 @@ import java.lang.reflect.InvocationTargetException
 import java.util
 import java.util.Locale
 
+import scala.annotation.tailrec
 import scala.collection.mutable
 import scala.util.control.NonFatal
 
@@ -81,14 +82,18 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
    * Due to classloader isolation issues, pattern matching won't work here so we need
    * to compare the canonical names of the exceptions, which we assume to be stable.
   */
-  private def isClientException(e: Throwable): Boolean = {
-    var temp: Class[_] = e.getClass
-    var found = false
-    while (temp != null && !found) {
-      found = clientExceptions.contains(temp.getCanonicalName)
-      temp = temp.getSuperclass
-    }
-    found
+  @tailrec
+  private def isClientException(e: Throwable): Boolean = e match {
+    case re: RuntimeException if re.getCause != null =>
+      isClientException(re.getCause)
+    case e =>
+      var temp: Class[_] = e.getClass
+      var found = false
+      while (temp != null && !found) {
+        found = clientExceptions.contains(temp.getCanonicalName)
+        temp = temp.getSuperclass
+      }
+      found
   }
 
   /**
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index f5c400570a5e9..261d0bccd31dc 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -19,8 +19,7 @@ package org.apache.spark.sql.hive.client
 
 import java.io.{OutputStream, PrintStream}
 import java.lang.{Iterable => JIterable}
-import java.lang.reflect.{Proxy => JdkProxy}
-import java.lang.reflect.InvocationTargetException
+import java.lang.reflect.{InvocationTargetException, Method, Proxy => JdkProxy}
 import java.nio.charset.StandardCharsets.UTF_8
 import java.util.{HashMap => JHashMap, Locale, Map => JMap}
 import java.util.concurrent.TimeUnit._
@@ -67,7 +66,7 @@ import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
 import org.apache.spark.sql.hive.HiveExternalCatalog.DATASOURCE_SCHEMA
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
-import org.apache.spark.util.{CircularBuffer, SparkClassUtils, Utils}
+import org.apache.spark.util.{CircularBuffer, Utils}
 
 /**
  * A class that wraps the HiveClient and converts its responses to externally visible classes.
@@ -1417,45 +1416,57 @@ private[hive] object HiveClientImpl extends Logging {
       case _: NoSuchMethodError => Hive.get(hiveConf)
     }
-    configureMaxThriftMessageSize(hiveConf, hive.getMSC)
+
+    // Follow the behavior of HIVE-26633 (4.0.0): only apply the max message size when
+    // `hive.thrift.client.max.message.size` is set and the value is positive
+    Option(hiveConf.get("hive.thrift.client.max.message.size"))
+      .map(HiveConf.toSizeBytes(_).toInt).filter(_ > 0)
+      .foreach { maxMessageSize =>
+        logDebug(s"Trying to set metastore client thrift max message size to $maxMessageSize")
+        configureMaxThriftMessageSize(hiveConf, hive.getMSC, maxMessageSize)
+      }
+
     hive
   }
 
+  private def getFieldValue[T](obj: Any, fieldName: String): T = {
+    val field = obj.getClass.getDeclaredField(fieldName)
+    field.setAccessible(true)
+    field.get(obj).asInstanceOf[T]
+  }
+
+  private def findMethod(klass: Class[_], name: String, args: Class[_]*): Method = {
+    val method = klass.getDeclaredMethod(name, args: _*)
+    method.setAccessible(true)
+    method
+  }
+
   // SPARK-49489: a surgical fix for Hive 2.3.10 due to the lack of HIVE-26633
   private def configureMaxThriftMessageSize(
-      hiveConf: HiveConf, msClient: IMetaStoreClient): Unit = try {
+      hiveConf: HiveConf, msClient: IMetaStoreClient, maxMessageSize: Int): Unit = try {
     msClient match {
       // Hive uses Java Dynamic Proxy to enhance the MetaStoreClient to support synchronization
       // and retrying; we should unwrap and access the real MetaStoreClient instance first
       case proxy if JdkProxy.isProxyClass(proxy.getClass) =>
         JdkProxy.getInvocationHandler(proxy) match {
           case syncHandler if syncHandler.getClass.getName.endsWith("SynchronizedHandler") =>
-            val realMscField = SparkClassUtils.classForName(
-              "org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler")
-              .getDeclaredField("client")
-            realMscField.setAccessible(true)
-            val realMsc = realMscField.get(syncHandler).asInstanceOf[IMetaStoreClient]
-            configureMaxThriftMessageSize(hiveConf, realMsc)
+            val realMsc = getFieldValue[IMetaStoreClient](syncHandler, "client")
+            configureMaxThriftMessageSize(hiveConf, realMsc, maxMessageSize)
           case retryHandler: RetryingMetaStoreClient =>
-            val realMscField = classOf[RetryingMetaStoreClient].getDeclaredField("base")
-            realMscField.setAccessible(true)
-            val realMsc = realMscField.get(retryHandler).asInstanceOf[IMetaStoreClient]
-            configureMaxThriftMessageSize(hiveConf, realMsc)
+            val realMsc = getFieldValue[IMetaStoreClient](retryHandler, "base")
+            configureMaxThriftMessageSize(hiveConf, realMsc, maxMessageSize)
           case _ =>
         }
-
       case msc: HiveMetaStoreClient if !msc.isLocalMetaStore =>
         msc.getTTransport match {
           case t: TEndpointTransport =>
-            // The configuration is added in HIVE-26633 (4.0.0)
-            val maxThriftMessageSize = HiveConf.toSizeBytes(
-              hiveConf.get("hive.thrift.client.max.message.size", "1gb")).toInt
-            if (t.getConfiguration.getMaxMessageSize != maxThriftMessageSize) {
-              t.getConfiguration.setMaxMessageSize(maxThriftMessageSize)
-              val resetConsumedMessageSizeMethod = classOf[TEndpointTransport]
-                .getDeclaredMethod("resetConsumedMessageSize", classOf[Long])
-              resetConsumedMessageSizeMethod.setAccessible(true)
-              resetConsumedMessageSizeMethod.invoke(t, Long.box(-1L))
+            val currentMaxMessageSize = t.getConfiguration.getMaxMessageSize
+            if (currentMaxMessageSize != maxMessageSize) {
+              logDebug("Changing the current metastore client thrift max message size from " +
+                s"$currentMaxMessageSize to $maxMessageSize")
+              t.getConfiguration.setMaxMessageSize(maxMessageSize)
+              findMethod(classOf[TEndpointTransport], "resetConsumedMessageSize", classOf[Long])
+                .invoke(t, Long.box(-1L))
             }
           case _ =>
         }

From 2de7f3849b434f705951ed397dd8db45ccf8c75d Mon Sep 17 00:00:00 2001
From: Cheng Pan
Date: Tue, 25 Feb 2025 14:42:12 +0800
Subject: [PATCH 3/4] wip

---
 .../sql/hive/client/HiveClientImpl.scala      | 38 ++++++++-----------
 1 file changed, 15 insertions(+), 23 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 261d0bccd31dc..f6fe47c72bdd1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.client
 
 import java.io.{OutputStream, PrintStream}
 import java.lang.{Iterable => JIterable}
-import java.lang.reflect.{InvocationTargetException, Method, Proxy => JdkProxy}
+import java.lang.reflect.{InvocationTargetException, Proxy => JdkProxy}
 import java.nio.charset.StandardCharsets.UTF_8
 import java.util.{HashMap => JHashMap, Locale, Map => JMap}
 import java.util.concurrent.TimeUnit._
@@ -44,7 +44,6 @@ import org.apache.hadoop.hive.serde.serdeConstants
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
 import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
 import org.apache.hadoop.security.UserGroupInformation
-import org.apache.thrift.transport.TEndpointTransport
 
 import org.apache.spark.{SparkConf, SparkException, SparkThrowable}
 import org.apache.spark.deploy.SparkHadoopUtil.SOURCE_SPARK
@@ -1435,12 +1434,6 @@ private[hive] object HiveClientImpl extends Logging {
     field.get(obj).asInstanceOf[T]
   }
 
-  private def findMethod(klass: Class[_], name: String, args: Class[_]*): Method = {
-    val method = klass.getDeclaredMethod(name, args: _*)
-    method.setAccessible(true)
-    method
-  }
-
   // SPARK-49489: a surgical fix for Hive 2.3.10 due to the lack of HIVE-26633
   private def configureMaxThriftMessageSize(
       hiveConf: HiveConf, msClient: IMetaStoreClient, maxMessageSize: Int): Unit = try {
@@ -1458,24 +1451,23 @@ private[hive] object HiveClientImpl extends Logging {
           case _ =>
         }
       case msc: HiveMetaStoreClient if !msc.isLocalMetaStore =>
-        msc.getTTransport match {
-          case t: TEndpointTransport =>
-            val currentMaxMessageSize = t.getConfiguration.getMaxMessageSize
-            if (currentMaxMessageSize != maxMessageSize) {
-              logDebug("Changing the current metastore client thrift max message size from " +
-                s"$currentMaxMessageSize to $maxMessageSize")
-              t.getConfiguration.setMaxMessageSize(maxMessageSize)
-              findMethod(classOf[TEndpointTransport], "resetConsumedMessageSize", classOf[Long])
-                .invoke(t, Long.box(-1L))
-            }
-          case _ =>
+        val tTransport = msc.getTTransport
+        // The method is added in THRIFT-5237 (0.14.0)
+        val tConf = tTransport.getConfiguration
+        val currentMaxMessageSize = tConf.getMaxMessageSize
+        if (currentMaxMessageSize != maxMessageSize) {
+          logDebug("Changing the current metastore client thrift max message size from " +
+            s"$currentMaxMessageSize to $maxMessageSize")
+          tConf.setMaxMessageSize(maxMessageSize)
+          // This internally calls TEndpointTransport#resetConsumedMessageSize(-1L) to
+          // apply the updated maxMessageSize
+          tTransport.updateKnownMessageSize(0L)
         }
       case _ => // do nothing
     }
   } catch {
-    // TEndpointTransport is added in THRIFT-5237 (0.14.0); Hive versions that use an older
-    // Thrift library (e.g. Hive 2.3.9 uses Thrift 0.9.3) aren't affected by THRIFT-5237
-    // and don't need to apply HIVE-26633
-    case _: NoClassDefFoundError => // do nothing
+    // Hive versions that use an older Thrift library (e.g. Hive 2.3.9 uses Thrift 0.9.3)
+    // aren't affected by THRIFT-5237 and don't need to apply HIVE-26633
+    case _: NoSuchMethodError => // do nothing
   }
 }

From 354b96efd4d047d689658ff77e2deb0f7be15f1d Mon Sep 17 00:00:00 2001
From: Cheng Pan
Date: Tue, 25 Feb 2025 15:57:10 +0800
Subject: [PATCH 4/4] fix

---
 .../sql/hive/client/HiveClientImpl.scala      | 57 ++++++++++++-------
 1 file changed, 38 insertions(+), 19 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index f6fe47c72bdd1..90f8a3a85d70c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -24,6 +24,7 @@ import java.nio.charset.StandardCharsets.UTF_8
 import java.util.{HashMap => JHashMap, Locale, Map => JMap}
 import java.util.concurrent.TimeUnit._
 
+import scala.annotation.tailrec
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 import scala.jdk.CollectionConverters._
@@ -43,7 +44,9 @@ import org.apache.hadoop.hive.ql.session.SessionState
 import org.apache.hadoop.hive.serde.serdeConstants
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
 import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
+import org.apache.hadoop.hive.thrift.TFilterTransport
 import org.apache.hadoop.security.UserGroupInformation
+import org.apache.thrift.transport.{TEndpointTransport, TTransport}
 
 import org.apache.spark.{SparkConf, SparkException, SparkThrowable}
 import org.apache.spark.deploy.SparkHadoopUtil.SOURCE_SPARK
@@ -1434,40 +1437,56 @@ private[hive] object HiveClientImpl extends Logging {
     field.get(obj).asInstanceOf[T]
   }
 
+  private def getFieldValue[T](obj: Any, clazz: Class[_], fieldName: String): T = {
+    val field = clazz.getDeclaredField(fieldName)
+    field.setAccessible(true)
+    field.get(obj).asInstanceOf[T]
+  }
+
   // SPARK-49489: a surgical fix for Hive 2.3.10 due to the lack of HIVE-26633
   private def configureMaxThriftMessageSize(
       hiveConf: HiveConf, msClient: IMetaStoreClient, maxMessageSize: Int): Unit = try {
     msClient match {
       // Hive uses Java Dynamic Proxy to enhance the MetaStoreClient to support synchronization
-      // and retrying; we should unwrap and access the real MetaStoreClient instance first
+      // and retrying; we should unwrap and access the underlying MetaStoreClient instance first
       case proxy if JdkProxy.isProxyClass(proxy.getClass) =>
         JdkProxy.getInvocationHandler(proxy) match {
           case syncHandler if syncHandler.getClass.getName.endsWith("SynchronizedHandler") =>
-            val realMsc = getFieldValue[IMetaStoreClient](syncHandler, "client")
-            configureMaxThriftMessageSize(hiveConf, realMsc, maxMessageSize)
+            val wrappedMsc = getFieldValue[IMetaStoreClient](syncHandler, "client")
+            configureMaxThriftMessageSize(hiveConf, wrappedMsc, maxMessageSize)
           case retryHandler: RetryingMetaStoreClient =>
-            val realMsc = getFieldValue[IMetaStoreClient](retryHandler, "base")
-            configureMaxThriftMessageSize(hiveConf, realMsc, maxMessageSize)
+            val wrappedMsc = getFieldValue[IMetaStoreClient](retryHandler, "base")
+            configureMaxThriftMessageSize(hiveConf, wrappedMsc, maxMessageSize)
           case _ =>
         }
       case msc: HiveMetaStoreClient if !msc.isLocalMetaStore =>
-        val tTransport = msc.getTTransport
-        // The method is added in THRIFT-5237 (0.14.0)
-        val tConf = tTransport.getConfiguration
-        val currentMaxMessageSize = tConf.getMaxMessageSize
-        if (currentMaxMessageSize != maxMessageSize) {
-          logDebug("Changing the current metastore client thrift max message size from " +
-            s"$currentMaxMessageSize to $maxMessageSize")
-          tConf.setMaxMessageSize(maxMessageSize)
-          // This internally calls TEndpointTransport#resetConsumedMessageSize(-1L) to
-          // apply the updated maxMessageSize
-          tTransport.updateKnownMessageSize(0L)
+        @tailrec
+        def configure(t: TTransport): Unit = t match {
+          // Unwrap and access the underlying TTransport when security is enabled (Kerberos)
+          case tTransport: TFilterTransport =>
+            val wrappedTTransport = getFieldValue[TTransport](
+              tTransport, classOf[TFilterTransport], "wrapped")
+            configure(wrappedTTransport)
+          case tTransport: TEndpointTransport =>
+            val tConf = tTransport.getConfiguration
+            val currentMaxMessageSize = tConf.getMaxMessageSize
+            if (currentMaxMessageSize != maxMessageSize) {
+              logDebug("Changing the current metastore client thrift max message size from " +
+                s"$currentMaxMessageSize to $maxMessageSize")
+              tConf.setMaxMessageSize(maxMessageSize)
+              // This internally calls TEndpointTransport#resetConsumedMessageSize(-1L) to
+              // apply the updated maxMessageSize
+              tTransport.updateKnownMessageSize(0L)
+            }
+          case _ =>
         }
+        configure(msc.getTTransport)
       case _ => // do nothing
     }
   } catch {
-    // Hive versions that use an older Thrift library (e.g. Hive 2.3.9 uses Thrift 0.9.3)
-    // aren't affected by THRIFT-5237 and don't need to apply HIVE-26633
-    case _: NoSuchMethodError => // do nothing
+    // TEndpointTransport is added in THRIFT-5237 (0.14.0); Hive versions that use an older
+    // Thrift library (e.g. Hive 2.3.9 uses Thrift 0.9.3) aren't affected by THRIFT-5237
+    // and don't need to apply HIVE-26633
+    case _: NoClassDefFoundError => // do nothing
   }
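
A note on the pattern the series converges on: every revision above boils down to the
same two moves, reflectively unwrapping the JDK dynamic proxy layers that Hive stacks
on top of HiveMetaStoreClient, then mutating the Thrift transport configuration
underneath. The sketch below shows just the unwrap half in isolation, runnable without
Hive or Thrift on the classpath. Client, RealClient, SynchronizedHandler, and
UnwrapDemo are hypothetical stand-ins for the Hive classes; only java.lang.reflect and
scala.annotation are real APIs here.

import java.lang.reflect.{InvocationHandler, Method, Proxy}

import scala.annotation.tailrec

// Stand-in for IMetaStoreClient.
trait Client {
  def ping(): String
}

// Stand-in for the concrete HiveMetaStoreClient.
final class RealClient extends Client {
  override def ping(): String = "pong"
}

// Stand-in for HiveMetaStoreClient$SynchronizedHandler: it keeps the wrapped
// instance in a private field named "client", which is what the patches read.
final class SynchronizedHandler(private val client: Client) extends InvocationHandler {
  override def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = {
    // args is null when the invoked interface method takes no parameters
    val forwarded = if (args == null) Array.empty[AnyRef] else args
    this.synchronized { method.invoke(client, forwarded: _*) }
  }
}

object UnwrapDemo {
  // Same shape as the patches' getFieldValue helper: read a private field reflectively.
  private def getFieldValue[T](obj: Any, fieldName: String): T = {
    val field = obj.getClass.getDeclaredField(fieldName)
    field.setAccessible(true)
    field.get(obj).asInstanceOf[T]
  }

  // Peel proxy layers until the concrete client is reached, like
  // configureMaxThriftMessageSize does before it touches the transport.
  @tailrec
  def unwrap(c: Client): Client = c match {
    case p if Proxy.isProxyClass(p.getClass) =>
      Proxy.getInvocationHandler(p) match {
        case h: SynchronizedHandler => unwrap(getFieldValue[Client](h, "client"))
        case _ => c
      }
    case _ => c
  }

  def main(args: Array[String]): Unit = {
    val real = new RealClient
    val proxied = Proxy.newProxyInstance(
      getClass.getClassLoader,
      Array[Class[_]](classOf[Client]),
      new SynchronizedHandler(real)).asInstanceOf[Client]

    assert(unwrap(proxied) eq real) // the proxy peels back to the real instance
    println(unwrap(proxied).ping()) // prints "pong"
  }
}

The same peel-then-mutate shape is what the final patch applies twice: once across
InvocationHandler layers to reach HiveMetaStoreClient, and once across TTransport
layers (TFilterTransport wrapping TEndpointTransport) to reach the TConfiguration
whose max message size it raises.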