Skip to content

Commit c17af0b

Browse files
committed
Cast to classic session to convert internalRowRdd directly to a DataFrame
1 parent f3cad6f commit c17af0b

File tree

1 file changed

+3
-6
lines changed

1 file changed

+3
-6
lines changed

core/shims/spark400/src/main/scala/org/apache/spark/sql/SparkSqlUtils.scala

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,11 +20,11 @@ package org.apache.spark.sql.spark400
2020
import org.apache.arrow.vector.types.pojo.Schema
2121
import org.apache.spark.TaskContext
2222
import org.apache.spark.api.java.JavaRDD
23-
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
23+
import org.apache.spark.sql.classic.ClassicConversions
24+
import org.apache.spark.sql.{DataFrame, SparkSession}
2425
import org.apache.spark.sql.execution.arrow.ArrowConverters
2526
import org.apache.spark.sql.types._
2627
import org.apache.spark.sql.util.ArrowUtils
27-
import org.apache.spark.sql.catalyst.InternalRow
2828

2929
object SparkSqlUtils {
3030
def toDataFrame(
@@ -43,10 +43,7 @@ object SparkSqlUtils {
4343
largeVarTypes = false,
4444
context = context)
4545
}
46-
val rowRdd = internalRowRdd.map { internalRow =>
47-
Row.fromSeq(internalRow.toSeq(schema))
48-
}
49-
session.createDataFrame(rowRdd.setName("arrow"), schema)
46+
ClassicConversions.castToImpl(session).internalCreateDataFrame(internalRowRdd.setName("arrow"), schema)
5047
}
5148

5249
def toArrowSchema(schema : StructType, timeZoneId : String) : Schema = {

0 commit comments

Comments
 (0)