amaliujia commented on code in PR #38406:
URL: https://github.com/apache/spark/pull/38406#discussion_r1006399748


##########
connector/connect/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala:
##########
@@ -30,181 +36,140 @@ import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
  */
 class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
 
-  lazy val connectTestRelation = createLocalRelationProto(Seq($"id".int, $"name".string))
+  lazy val connectTestRelation =
+    createLocalRelationProto(
+      Seq(AttributeReference("id", IntegerType)(), AttributeReference("name", StringType)()))
 
-  lazy val connectTestRelation2 = createLocalRelationProto(
-    Seq($"key".int, $"value".int, $"name".string))
+  lazy val connectTestRelation2 =
+    createLocalRelationProto(
+      Seq(AttributeReference("id", IntegerType)(), AttributeReference("name", StringType)()))
 
-  lazy val sparkTestRelation: LocalRelation = LocalRelation($"id".int, $"name".string)
+  lazy val sparkTestRelation: LocalRelation =
+    LocalRelation(
+      AttributeReference("id", IntegerType)(),
+      AttributeReference("name", StringType)())
 
   lazy val sparkTestRelation2: LocalRelation =
-    LocalRelation($"key".int, $"value".int, $"name".string)
+    LocalRelation(
+      AttributeReference("id", IntegerType)(),
+      AttributeReference("name", StringType)())
 
   test("Basic select") {
-    val connectPlan = {
-      // TODO: Scala only allows one implicit per scope so we keep proto implicit imports in
-      // this scope. Need to find a better way to make two implicits work in the same scope.
-      import org.apache.spark.sql.connect.dsl.expressions._
-      import org.apache.spark.sql.connect.dsl.plans._
-      transform(connectTestRelation.select("id".protoAttr))
-    }
-    val sparkPlan = sparkTestRelation.select($"id")
-    comparePlans(connectPlan.analyze, sparkPlan.analyze, false)
+    val connectPlan = connectTestRelation.select("id".protoAttr)
+    val sparkPlan = sparkTestRelation.select("id")

Review Comment:
   Done. Just use `spark.createDataFrame`, which returns a DataFrame. Later, if there is a need, we can also use this API to populate the relations with some data and then test the results.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
