eternal-heart-k commented on issue #291:
URL: https://github.com/apache/doris-spark-connector/issues/291#issuecomment-2908784499

   I have encountered a similar problem: reading fails whenever the `where` clause filters on a date/datetime column.
   
   The create table SQL:
   ``` sql
   CREATE TABLE `user_tag_test` (
       `base_time` datetime NOT NULL,
       `user_id` varchar(20) NOT NULL,
       `distinct_id` varchar(255) NULL,
       `value` varchar(255) NULL
   ) ENGINE = OLAP
   DUPLICATE KEY(`base_time`)
   COMMENT 'test'
   PARTITION BY RANGE (`base_time`)()
   DISTRIBUTED BY HASH(`user_id`)
    PROPERTIES (
       "dynamic_partition.enable" = "true",
       "dynamic_partition.prefix" = "p",
       "dynamic_partition.start" = "-7",
       "dynamic_partition.end" = "3",
       "dynamic_partition.time_unit" = "day",
       "replication_num" = "1"
   );
   ```
   The PySpark code:
   ``` python
   spark.read.format("doris")\
       .option('doris.fenodes', 'xxx')\
       .option('user', 'xxx')\
       .option('password', 'xxx')\
       .option('doris.table.identifier', 'xxx.user_tag_test')\
       .load().createOrReplaceTempView("temp")
   
    query = spark.sql("""
    SELECT *
    FROM temp
    WHERE base_time = current_date()
    """)
    
    # But with a filter on a varchar column instead, the query executes successfully:
    # query = spark.sql("""
    # SELECT *
    # FROM temp
    # WHERE user_id = '123'
    # """)
   
   query.show(10)
   print(query.count())
   ```
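   
   For reference, a possible workaround (a minimal sketch, not a verified fix; it assumes Doris will implicitly cast a `yyyy-MM-dd` string literal when comparing against a `datetime` column): compute the date on the Python side and inline it as a string, so the pushed-down predicate is a plain literal rather than a `CAST` on an epoch value.
   ``` python
   from datetime import date
   
   # Sketch only: inline today's date as a string literal so the pushed-down
   # predicate avoids the failing CAST(<epoch micros> AS datetimev2(0)).
   today = date.today().isoformat()  # e.g. '2025-05-26'
   
   query = spark.sql(f"""
   SELECT *
   FROM temp
   WHERE base_time = '{today}'
   """)
   query.show(10)
   ```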
   The full stack trace:
   ```
   Py4JJavaError: An error occurred while calling o241.showString.
   : java.lang.RuntimeException: query plan request failed
       at org.apache.doris.spark.client.DorisFrontendClient.lambda$getQueryPlan$6(DorisFrontendClient.java:328)
       at org.apache.doris.spark.client.DorisFrontendClient.requestFrontends(DorisFrontendClient.java:162)
       at org.apache.doris.spark.client.DorisFrontendClient.requestFrontends(DorisFrontendClient.java:152)
       at org.apache.doris.spark.client.DorisFrontendClient.getQueryPlan(DorisFrontendClient.java:310)
       at org.apache.doris.spark.client.read.ReaderPartitionGenerator.generatePartitions(ReaderPartitionGenerator.java:81)
       at org.apache.doris.spark.read.AbstractDorisScan.planInputPartitions(AbstractDorisScan.scala:40)
       at org.apache.spark.sql.execution.datasources.v2.BatchScanExec.inputPartitions$lzycompute(BatchScanExec.scala:54)
       at org.apache.spark.sql.execution.datasources.v2.BatchScanExec.inputPartitions(BatchScanExec.scala:54)
       at org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExecBase.supportsColumnar(DataSourceV2ScanExecBase.scala:142)
       at org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExecBase.supportsColumnar$(DataSourceV2ScanExecBase.scala:141)
       at org.apache.spark.sql.execution.datasources.v2.BatchScanExec.supportsColumnar(BatchScanExec.scala:36)
       at org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy.apply(DataSourceV2Strategy.scala:143)
       at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$1(QueryPlanner.scala:63)
       at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
       at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
       at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:491)
       at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
       at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69)
       at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$3(QueryPlanner.scala:78)
       at scala.collection.TraversableOnce$folder$1.apply(TraversableOnce.scala:196)
       at scala.collection.TraversableOnce$folder$1.apply(TraversableOnce.scala:194)
       at scala.collection.Iterator.foreach(Iterator.scala:943)
       at scala.collection.Iterator.foreach$(Iterator.scala:943)
       at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
       at scala.collection.TraversableOnce.foldLeft(TraversableOnce.scala:199)
       at scala.collection.TraversableOnce.foldLeft$(TraversableOnce.scala:192)
       at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1431)
       at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$2(QueryPlanner.scala:75)
       at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
       at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
       at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
       at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:69)
       at org.apache.spark.sql.execution.QueryExecution$.createSparkPlan(QueryExecution.scala:459)
       at org.apache.spark.sql.execution.QueryExecution.$anonfun$sparkPlan$1(QueryExecution.scala:145)
       at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
       at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:185)
       at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:510)
       at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:185)
       at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
       at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:184)
       at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:145)
       at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:138)
       at org.apache.spark.sql.execution.QueryExecution.$anonfun$executedPlan$1(QueryExecution.scala:158)
       at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
       at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:185)
       at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:510)
       at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:185)
       at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
       at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:184)
       at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:158)
       at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:151)
       at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:204)
       at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:249)
       at org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:218)
       at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:103)
       at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
       at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
       at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
       at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
       at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3856)
       at org.apache.spark.sql.Dataset.head(Dataset.scala:2863)
       at org.apache.spark.sql.Dataset.take(Dataset.scala:3084)
       at org.apache.spark.sql.Dataset.getRows(Dataset.scala:288)
       at org.apache.spark.sql.Dataset.showString(Dataset.scala:327)
       at sun.reflect.GeneratedMethodAccessor79.invoke(Unknown Source)
       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
       at java.lang.reflect.Method.invoke(Method.java:498)
       at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
       at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
       at py4j.Gateway.invoke(Gateway.java:282)
       at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
       at py4j.commands.CallCommand.execute(CallCommand.java:79)
       at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
       at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
       at java.lang.Thread.run(Thread.java:750)
   Caused by: org.apache.doris.spark.exception.DorisException: query plan failed, exception: errCode = 2, detailMessage = Incorrect datetime value: CAST(1748188800000000 AS datetimev2(0)) in expression: (`base_time` = CAST(1748188800000000 AS datetimev2(0)))
       at org.apache.doris.spark.client.DorisFrontendClient.lambda$getQueryPlan$6(DorisFrontendClient.java:324)
       ... 74 more
   ```
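   
   For what it's worth, the rejected literal looks like microseconds since the Unix epoch: the connector appears to serialize the `current_date()` value as an epoch number and wrap it in `CAST(... AS datetimev2(0))`, which the Doris frontend rejects. A quick sanity check (the UTC+8 cluster timezone here is my assumption):
   ``` python
   from datetime import datetime, timezone, timedelta
   
   # Literal taken from the error message, read as epoch microseconds.
   micros = 1748188800000000
   # Assumption: the cluster timezone is UTC+8 (Asia/Shanghai).
   tz = timezone(timedelta(hours=8))
   print(datetime.fromtimestamp(micros / 1_000_000, tz=tz))
   # -> 2025-05-26 00:00:00+08:00, i.e. midnight of current_date()
   ```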

