leesf commented on a change in pull request #962: [HUDI-251] JDBC incremental
load to HUDI DeltaStreamer
URL: https://github.com/apache/incubator-hudi/pull/962#discussion_r336550485
##########
File path:
hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/JDBCSource.java
##########
@@ -0,0 +1,238 @@
package org.apache.hudi.utilities.sources;

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hudi.DataSourceUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.common.util.TypedProperties;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.utilities.schema.SchemaProvider;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.DataTypes;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
+public class JDBCSource extends RowSource {
+
+ private static Logger LOG = LoggerFactory.getLogger(JDBCSource.class);
+
  /**
   * Creates a JDBC-backed row source; all arguments are forwarded unchanged to the
   * parent {@link RowSource} constructor.
   *
   * @param props          configuration properties for this source (JDBC url, table, credentials, ...)
   * @param sparkContext   active Spark context
   * @param sparkSession   Spark session used elsewhere in this class to build the JDBC reader
   * @param schemaProvider schema provider handed through to the parent RowSource
   */
  public JDBCSource(TypedProperties props, JavaSparkContext sparkContext, SparkSession sparkSession,
      SchemaProvider schemaProvider) {
    super(props, sparkContext, sparkSession, schemaProvider);
  }
+
+ private static DataFrameReader validatePropsAndGetDataFrameReader(final
SparkSession session,
+ final TypedProperties properties)
+ throws HoodieException {
+ FSDataInputStream passwordFileStream = null;
+ try {
+ DataFrameReader dataFrameReader = session.read().format("jdbc");
+ dataFrameReader = dataFrameReader.option(Config.URL_PROP,
properties.getString(Config.URL));
+ dataFrameReader = dataFrameReader.option(Config.USER_PROP,
properties.getString(Config.USER));
+ dataFrameReader = dataFrameReader.option(Config.DRIVER_PROP,
properties.getString(Config.DRIVER_CLASS));
+ dataFrameReader = dataFrameReader
+ .option(Config.RDBMS_TABLE_PROP,
properties.getString(Config.RDBMS_TABLE_NAME));
+
+ if (!properties.containsKey(Config.PASSWORD)) {
+ if (properties.containsKey(Config.PASSWORD_FILE)) {
+ if
(!StringUtils.isNullOrEmpty(properties.getString(Config.PASSWORD_FILE))) {
+ LOG.info("Reading password for password file {}",
properties.getString(Config.PASSWORD_FILE));
+ FileSystem fileSystem = FileSystem.get(new Configuration());
+ passwordFileStream = fileSystem.open(new
Path(properties.getString(Config.PASSWORD_FILE)));
+ byte[] bytes = new byte[passwordFileStream.available()];
+ passwordFileStream.read(bytes);
+ dataFrameReader = dataFrameReader.option(Config.PASSWORD_PROP, new
String(bytes));
+ } else {
+ throw new IllegalArgumentException(
+ String.format("%s property cannot be null or empty",
Config.PASSWORD_FILE));
+ }
+ } else {
+ throw new IllegalArgumentException(String.format("JDBCSource needs
either a %s or %s to connect to RDBMS "
+ + "datasource", Config.PASSWORD_FILE, Config.PASSWORD));
+ }
+ } else if
(!StringUtils.isNullOrEmpty(properties.getString(Config.PASSWORD))) {
+ dataFrameReader = dataFrameReader.option(Config.PASSWORD_PROP,
properties.getString(Config.PASSWORD));
+ } else {
+ throw new IllegalArgumentException(String.format("%s cannot be null or
empty. ", Config.PASSWORD));
+ }
+// if (properties.containsKey(Config.EXTRA_OPTIONS)) {
+// if
(!StringUtils.isNullOrEmpty(properties.getString(Config.EXTRA_OPTIONS))) {
+// LOG.info("Setting {}", Config.EXTRA_OPTIONS);
+// String[] options =
properties.getString(Config.EXTRA_OPTIONS).split(",");
+// for (String option : options) {
+// if (!StringUtils.isNullOrEmpty(option)) {
+// String[] kv = option.split("=");
+// if (kv.length == 2) {
+// dataFrameReader = dataFrameReader.option(kv[0], kv[1]);
+// LOG.info("{} = {} has been set for JDBC pull ", kv[0],
kv[1]);
+// } else {
+// LOG.warn("Option {} not set because it does not have a
value", kv[0], new IllegalArgumentException(
+// String.format(" %s should have a corresponding value
separated by \"=\"", kv[0])));
+// }
+// }
+// }
+// }
+// }
+
+ properties.entrySet().stream().forEach(i->
System.out.println(properties.get(i)));
+ if (properties.getBoolean(Config.IS_INCREMENTAL)) {
+ DataSourceUtils.checkRequiredProperties(properties,
Arrays.asList(Config.INCREMENTAL_COLUMN));
+ }
+ return dataFrameReader;
+ } catch (Exception e) {
+ throw new HoodieException(e);
+ } finally {
+ IOUtils.closeStream(passwordFileStream);
+ }
+ }
+
+ @Override
+ protected Pair<Option<Dataset<Row>>, String> fetchNextBatch(Option<String>
lastCkptStr, long sourceLimit) {
+ // final String ppdQuery = "(select * from %s where %s >=
TIMESTAMP(\"%s\")) rdbms_table";
+ final String ppdQuery = "(select * from %s where %s >= \"%s\")
rdbms_table";
 Review comment:
   This line needs indentation — please align it with the surrounding code style.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services