amaliujia commented on code in PR #38406:
URL: https://github.com/apache/spark/pull/38406#discussion_r1006353415


##########
connector/connect/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala:
##########
@@ -30,181 +36,140 @@ import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
  */
 class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
 
-  lazy val connectTestRelation = createLocalRelationProto(Seq($"id".int, $"name".string))
+  lazy val connectTestRelation =
+    createLocalRelationProto(
+      Seq(AttributeReference("id", IntegerType)(), AttributeReference("name", StringType)()))
 
-  lazy val connectTestRelation2 = createLocalRelationProto(
-    Seq($"key".int, $"value".int, $"name".string))
+  lazy val connectTestRelation2 =
+    createLocalRelationProto(
+      Seq(AttributeReference("id", IntegerType)(), AttributeReference("name", StringType)()))
 
-  lazy val sparkTestRelation: LocalRelation = LocalRelation($"id".int, $"name".string)
+  lazy val sparkTestRelation: LocalRelation =
+    LocalRelation(
+      AttributeReference("id", IntegerType)(),
+      AttributeReference("name", StringType)())
 
   lazy val sparkTestRelation2: LocalRelation =
-    LocalRelation($"key".int, $"value".int, $"name".string)
+    LocalRelation(
+      AttributeReference("id", IntegerType)(),
+      AttributeReference("name", StringType)())
 
   test("Basic select") {
-    val connectPlan = {
-      // TODO: Scala only allows one implicit per scope so we keep proto implicit imports in
-      // this scope. Need to find a better way to make two implicits work in the same scope.
-      import org.apache.spark.sql.connect.dsl.expressions._
-      import org.apache.spark.sql.connect.dsl.plans._
-      transform(connectTestRelation.select("id".protoAttr))
-    }
-    val sparkPlan = sparkTestRelation.select($"id")
-    comparePlans(connectPlan.analyze, sparkPlan.analyze, false)
+    val connectPlan = connectTestRelation.select("id".protoAttr)
+    val sparkPlan = sparkTestRelation.select("id")

Review Comment:
   I see. Let me update then.
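   
   For context, a minimal sketch of what the update could look like, assuming the point raised is that `connectTestRelation2`/`sparkTestRelation2` should keep their original `key`/`value`/`name` schema instead of duplicating the `id`/`name` one (an illustration only, not the final patch):
   
   ```scala
   // Hypothetical follow-up: restore the distinct schema for the second test relation,
   // using the same AttributeReference-based construction as the other relations above.
   lazy val connectTestRelation2 =
     createLocalRelationProto(
       Seq(
         AttributeReference("key", IntegerType)(),
         AttributeReference("value", IntegerType)(),
         AttributeReference("name", StringType)()))
   
   lazy val sparkTestRelation2: LocalRelation =
     LocalRelation(
       AttributeReference("key", IntegerType)(),
       AttributeReference("value", IntegerType)(),
       AttributeReference("name", StringType)())
   ```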



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

