grundprinzip commented on code in PR #40827:
URL: https://github.com/apache/spark/pull/40827#discussion_r1180756913


##########
connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala:
##########
@@ -119,12 +119,23 @@ class SparkSession private[sql] (
 
   private def createDataset[T](encoder: AgnosticEncoder[T], data: Iterator[T]): Dataset[T] = {
     newDataset(encoder) { builder =>
-      val localRelationBuilder = builder.getLocalRelationBuilder
-        .setSchema(encoder.schema.json)
       if (data.nonEmpty) {
         val timeZoneId = conf.get("spark.sql.session.timeZone")
-        val arrowData = ConvertToArrow(encoder, data, timeZoneId, allocator)
-        localRelationBuilder.setData(arrowData)
+        val (arrowData, arrowDataSize) = ConvertToArrow(encoder, data, timeZoneId, allocator)
+        if (arrowDataSize <= conf.get(SQLConf.LOCAL_RELATION_CACHE_THRESHOLD.key).toInt) {

Review Comment:
   It's kind of weird that we're using an internal server-side API (`SQLConf`) for the client-side confs.
   
   Ideally we'd leverage the existing stub configs in `connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/config/ConnectCommon.scala` for now?
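   
   For concreteness, a minimal sketch of that direction (the constant name and default value below are hypothetical, purely to illustrate the plain-constant style the existing stubs in `ConnectCommon` use):
   
   ```scala
   // In ConnectCommon.scala (hypothetical addition; the name and default
   // are illustrative only, not an existing config):
   private[sql] object ConnectCommon {
     val CONNECT_LOCAL_RELATION_CACHE_THRESHOLD: Int = 64 * 1024 * 1024
   }
   
   // Call-site sketch for createDataset: compare the serialized Arrow size
   // against the client-side stub instead of the server-internal SQLConf key.
   def shouldInlineLocalRelation(arrowDataSize: Long): Boolean =
     arrowDataSize <= ConnectCommon.CONNECT_LOCAL_RELATION_CACHE_THRESHOLD
   ```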



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

