brkyvz commented on a change in pull request #26957: [SPARK-30314] Add identifier and catalog information to DataSourceV2Relation

 File path: sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
 @@ -206,27 +207,29 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
       val finalOptions = sessionOptions ++ extraOptions.toMap ++ pathsOption
       val dsOptions = new CaseInsensitiveStringMap(finalOptions.asJava)
-      val table = provider match {
+      val (table, catalogOpt, ident) = provider match {
         case _: SupportsCatalogOptions if userSpecifiedSchema.nonEmpty =>
           throw new IllegalArgumentException(
             s"$source does not support user specified schema. Please don't specify the schema.")
         case hasCatalog: SupportsCatalogOptions =>
           val ident = hasCatalog.extractIdentifier(dsOptions)
           val catalog = CatalogV2Util.getTableProviderCatalog(
-            sparkSession.sessionState.catalogManager,
+            catalogManager,
-          catalog.loadTable(ident)
+          (catalog.loadTable(ident), catalogManager.catalogIdentifier(catalog), Seq(ident))
         case _ =>
           userSpecifiedSchema match {
-            case Some(schema) => provider.getTable(dsOptions, schema)
-            case _ => provider.getTable(dsOptions)
+            case Some(schema) => (provider.getTable(dsOptions, schema), None, Nil)
+            case _ => (provider.getTable(dsOptions), None, Nil)
 Review comment:
   They will be in the future, not yet though. This is fine for now

This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:

With regards,
Apache Git Services

To unsubscribe, e-mail:
For additional commands, e-mail:

Reply via email to