stoty commented on a change in pull request #69:
URL: https://github.com/apache/phoenix-connectors/pull/69#discussion_r799314827



##########
File path: phoenix-spark-base/src/main/java/org/apache/phoenix/spark/datasource/v2/PhoenixDataSource.java
##########
@@ -17,42 +17,76 @@
  */
 package org.apache.phoenix.spark.datasource.v2;
 
-import java.util.Optional;
-import java.util.Properties;
-
-import org.apache.phoenix.spark.datasource.v2.reader.PhoenixDataSourceReader;
-import org.apache.phoenix.spark.datasource.v2.writer.PhoenixDataSourceWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.spark.sql.SaveMode;
+import org.apache.phoenix.spark.SparkSchemaUtil;
+import org.apache.phoenix.util.ColumnInfo;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.spark.sql.connector.catalog.Table;
+import org.apache.spark.sql.connector.catalog.TableProvider;
+import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.sources.DataSourceRegister;
-import org.apache.spark.sql.sources.v2.DataSourceOptions;
-import org.apache.spark.sql.sources.v2.DataSourceV2;
-import org.apache.spark.sql.sources.v2.ReadSupport;
-import org.apache.spark.sql.sources.v2.WriteSupport;
-import org.apache.spark.sql.sources.v2.reader.DataSourceReader;
-import org.apache.spark.sql.sources.v2.writer.DataSourceWriter;
 import org.apache.spark.sql.types.StructType;
+import org.apache.spark.sql.util.CaseInsensitiveStringMap;
+import scala.collection.JavaConverters;
+import scala.collection.Seq;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
 
 /**
  * Implements the DataSourceV2 api to read and write from Phoenix tables
  */
-public class PhoenixDataSource  implements DataSourceV2,  ReadSupport, WriteSupport, DataSourceRegister {
+public class PhoenixDataSource implements TableProvider, DataSourceRegister {
 
     private static final Logger logger = LoggerFactory.getLogger(PhoenixDataSource.class);
+    public static final String TABLE_KEY = "table";
     public static final String SKIP_NORMALIZING_IDENTIFIER = "skipNormalizingIdentifier";
     public static final String ZOOKEEPER_URL = "zkUrl";
     public static final String PHOENIX_CONFIGS = "phoenixconfigs";
+    protected StructType schema;
+    private CaseInsensitiveStringMap options;
 
     @Override
-    public DataSourceReader createReader(DataSourceOptions options) {
-        return new PhoenixDataSourceReader(options);
+    public StructType inferSchema(CaseInsensitiveStringMap options){
+        if (options.get("table") == null) {

Review comment:
       I can still find a lot of "table" string literals in the code.
   There should be only one occurrence: the line where the TABLE_KEY constant is defined.
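
   For illustration, a minimal sketch of the pattern being asked for: every lookup of the
   option goes through the constant, so the "table" literal appears only where TABLE_KEY
   is defined. The class name, error message and elided body below are hypothetical, not
   taken from the PR:

       import org.apache.spark.sql.types.StructType;
       import org.apache.spark.sql.util.CaseInsensitiveStringMap;

       public class PhoenixDataSourceSketch {

           // The only place where the literal "table" is spelled out.
           public static final String TABLE_KEY = "table";

           public StructType inferSchema(CaseInsensitiveStringMap options) {
               // Look the option up through the constant, never through a repeated literal.
               if (options.get(TABLE_KEY) == null) {
                   throw new RuntimeException(
                           "No Phoenix option " + TABLE_KEY + " defined");
               }
               // ... actual Phoenix schema inference elided in this sketch ...
               return new StructType();
           }
       }

   Callers are unaffected: they still pass .option("table", ...) from Spark; only the
   internal lookups change.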



