pan3793 commented on code in PR #52995:
URL: https://github.com/apache/spark/pull/52995#discussion_r2513938399


##########
sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectDatabaseMetaData.scala:
##########
@@ -360,15 +361,113 @@ class SparkConnectDatabaseMetaData(conn: SparkConnectConnection) extends Databas
     new SparkConnectResultSet(df.collectResult())
   }
 
-  override def getTableTypes: ResultSet =
-    throw new SQLFeatureNotSupportedException
+  override def getTableTypes: ResultSet = {
+    conn.checkOpen()
+
+    val df = TABLE_TYPES.toDF("TABLE_TYPE")
+      .orderBy("TABLE_TYPE")
+    new SparkConnectResultSet(df.collectResult())
+  }
+
+  // Schema of the returned DataFrame is:
+  // |-- TABLE_CAT: string (nullable = false)
+  // |-- TABLE_SCHEM: string (nullable = false)
+  // |-- TABLE_NAME: string (nullable = false)
+  // |-- TABLE_TYPE: string (nullable = false)
+  // |-- REMARKS: string (nullable = false)
+  // |-- TYPE_CAT: string (nullable = false)
+  // |-- TYPE_SCHEM: string (nullable = false)
+  // |-- TYPE_NAME: string (nullable = false)
+  // |-- SELF_REFERENCING_COL_NAME: string (nullable = false)
+  // |-- REF_GENERATION: string (nullable = false)
+  private def getTablesDataFrame(
+      catalog: String,
+      schemaPattern: String,
+      tableNamePattern: String): connect.DataFrame = {
+
+    val catalogSchemas = getSchemasDataFrame(catalog, schemaPattern).collect()
+      .map { row => (row.getString(1), row.getString(0)) }
+
+    val tableNameFilterExpr = if (isNullOrWildcard(tableNamePattern)) {
+      lit(true)
+    } else {
+      $"TABLE_NAME".like(tableNamePattern)
+    }
+
+    val emptyDf = conn.spark.emptyDataFrame
+      .withColumn("TABLE_CAT", lit(""))
+      .withColumn("TABLE_SCHEM", lit(""))
+      .withColumn("TABLE_NAME", lit(""))
+      .withColumn("TABLE_TYPE", lit(""))
+      .withColumn("REMARKS", lit(""))
+      .withColumn("TYPE_CAT", lit(""))
+      .withColumn("TYPE_SCHEM", lit(""))
+      .withColumn("TYPE_NAME", lit(""))
+      .withColumn("SELF_REFERENCING_COL_NAME", lit(""))
+      .withColumn("REF_GENERATION", lit(""))
+
+    catalogSchemas.map { case (catalog, schema) =>
+      val viewDf = try {
+        conn.spark
+          .sql(s"SHOW VIEWS IN ${quoteNameParts(Seq(catalog, schema))}")
+          .select($"namespace".as("TABLE_SCHEM"), $"viewName".as("TABLE_NAME"))
+          .filter(tableNameFilterExpr)
+      } catch {
+      case st: SparkThrowable if st.getCondition == "MISSING_CATALOG_ABILITY.VIEWS" =>

Review Comment:
   will revise `getSchemas` too after SPARK-54303 lands.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to