diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index dd680e6bd4a87..551edb2e24de0 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -60,6 +60,8 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
     conn.prepareStatement("INSERT INTO numbers VALUES (b'0', b'1000100101', " +
       "17, 77777, 123456789, 123456789012345, 123456789012345.123456789012345, " +
       "42.75, 1.0000000000000002, -128)").executeUpdate()
+    conn.prepareStatement("INSERT INTO numbers VALUES (null, null, null, null, null," +
+      "null, null, null, null, null)").executeUpdate()
 
     conn.prepareStatement("CREATE TABLE unsigned_numbers (" +
       "tiny TINYINT UNSIGNED, small SMALLINT UNSIGNED, med MEDIUMINT UNSIGNED," +
@@ -337,6 +339,15 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
     checkAnswer(df, Row(Array[Byte](0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)))
   }
+
+  test("SPARK-47666: Check nulls for result set getters") {
+    Seq("true", "false").foreach { flag =>
+      withSQLConf(SQLConf.LEGACY_MYSQL_BIT_ARRAY_MAPPING_ENABLED.key -> flag) {
+        val nulls = spark.read.jdbc(jdbcUrl, "numbers", new Properties).tail(1).head
+        assert(nulls === Row(null, null, null, null, null, null, null, null, null, null))
+      }
+    }
+  }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 27ef8bd757725..8367264a5975f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -438,14 +438,16 @@ object JdbcUtils extends Logging with SQLConfHelper {
 
     case LongType if metadata.contains("binarylong") =>
       (rs: ResultSet, row: InternalRow, pos: Int) =>
-        val bytes = rs.getBytes(pos + 1)
-        var ans = 0L
-        var j = 0
-        while (j < bytes.length) {
-          ans = 256 * ans + (255 & bytes(j))
-          j = j + 1
-        }
-        row.setLong(pos, ans)
+        val l = nullSafeConvert[Array[Byte]](rs.getBytes(pos + 1), bytes => {
+          var ans = 0L
+          var j = 0
+          while (j < bytes.length) {
+            ans = 256 * ans + (255 & bytes(j))
+            j = j + 1
+          }
+          ans
+        })
+        row.update(pos, l)
 
     case LongType =>
       (rs: ResultSet, row: InternalRow, pos: Int) =>
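
For context on the `JdbcUtils` change: the old getter dereferenced the array returned by `rs.getBytes` unconditionally, so a SQL NULL (where `getBytes` returns `null`) caused a `NullPointerException`, and `row.setLong` could never represent a null anyway since it writes a primitive slot. `nullSafeConvert` is an existing private helper in `JdbcUtils` that applies a conversion only to non-null input. The sketch below illustrates the pattern outside of Spark; `NullSafeGetterDemo`, `bytesToLong`, and the sample byte values are hypothetical and not part of the patch.

```scala
// Minimal self-contained sketch of the null-safe getter pattern
// used in the patch. Names here are illustrative.
object NullSafeGetterDemo {
  // Mirrors JdbcUtils.nullSafeConvert: returns null when the driver
  // returned SQL NULL, otherwise applies the conversion function.
  def nullSafeConvert[T](input: T, f: T => Any): Any =
    if (input == null) null else f(input)

  // The bit-array-to-long conversion from the patch, factored out:
  // treats the byte array as a big-endian unsigned integer.
  def bytesToLong(bytes: Array[Byte]): Long = {
    var ans = 0L
    var j = 0
    while (j < bytes.length) {
      ans = 256 * ans + (255 & bytes(j))
      j = j + 1
    }
    ans
  }

  def main(args: Array[String]): Unit = {
    // Non-null input: b'1000100101' (549) arrives as the bytes [0x02, 0x25].
    println(nullSafeConvert[Array[Byte]](Array[Byte](2, 37), bytesToLong)) // 549
    // SQL NULL: getBytes returns null; the old code would have thrown an
    // NPE in the while loop instead of producing a null row value.
    println(nullSafeConvert[Array[Byte]](null, bytesToLong)) // null
  }
}
```

This is also why the patch switches `row.setLong(pos, ans)` to `row.update(pos, l)`: `update` accepts a boxed value, so the row slot can record the null that `nullSafeConvert` propagates, which is exactly what the new `SPARK-47666` test asserts against the all-null row inserted into `numbers`.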