vinothchandar commented on a change in pull request #917: [HUDI-251] JDBC
incremental load to HUDI with DeltaStreamer
URL: https://github.com/apache/incubator-hudi/pull/917#discussion_r329190769
##########
File path:
hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/JDBCSource.java
##########
@@ -0,0 +1,233 @@
+package org.apache.hudi.utilities.sources;
+
+import java.util.Arrays;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hudi.DataSourceUtils;
+import org.apache.hudi.common.util.Option;
+import org.apache.hudi.common.util.StringUtils;
+import org.apache.hudi.common.util.TypedProperties;
+import org.apache.hudi.common.util.collection.Pair;
+import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.utilities.schema.SchemaProvider;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.Column;
+import org.apache.spark.sql.DataFrameReader;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.functions;
+import org.apache.spark.sql.types.DataTypes;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class JDBCSource extends RowSource {
+
+ private static Logger LOG = LoggerFactory.getLogger(JDBCSource.class);
+
+ private final String ppdQuery = "(select * from %s where %s >= \" %s \")
rdbms_table";
+
+
+ public JDBCSource(TypedProperties props, JavaSparkContext sparkContext,
SparkSession sparkSession,
+ SchemaProvider schemaProvider) {
+ super(props, sparkContext, sparkSession, schemaProvider);
+ }
+
+ private static DataFrameReader validatePropsAndGetDataFrameReader(final
SparkSession session,
+ final TypedProperties properties)
+ throws HoodieException {
+ FSDataInputStream passwordFileStream = null;
+ try {
+ DataFrameReader dataFrameReader = session.read().format("jdbc");
+ dataFrameReader = dataFrameReader.option(Config.URL_PROP,
properties.getString(Config.URL));
+ dataFrameReader = dataFrameReader.option(Config.USER_PROP,
properties.getString(Config.USER));
+ dataFrameReader = dataFrameReader.option(Config.DRIVER_PROP,
properties.getString(Config.DRIVER_CLASS));
+ dataFrameReader = dataFrameReader
+ .option(Config.RDBMS_TABLE_PROP,
properties.getString(Config.RDBMS_TABLE_NAME));
+
+ if (!properties.containsKey(Config.PASSWORD)) {
Review comment:
2c. could be a lot simpler if we just checked one by one and then, at the end,
threw the error based on whether the password was obtainable
```
String password = null;
if (properties.containsKey(Config.PASSWORD_FILE)) {
password = //set if you can read it from file
}
if (properties.containsKey(Config.PASSWORD)) {
password = //set value.
}
if (password == null) {
// throw the error
}
```
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services