Github user dilipbiswal commented on a diff in the pull request:
https://github.com/apache/spark/pull/22560#discussion_r220751466
--- Diff: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtilsSuite.scala ---
@@ -65,4 +67,60 @@ class JdbcUtilsSuite extends SparkFunSuite {
     }
     assert(mismatchedInput.getMessage.contains("mismatched input '.' expecting"))
   }
+
+  test("Custom ConnectionFactory falls back to default when no factory specified") {
+    val options = new JDBCOptions(Map[String, String](
+      "url" -> "jdbc:mysql://foo.com/bar",
+      "dbtable" -> "table")
+    )
+    assert(options.connectionFactoryProvider eq DefaultConnectionFactoryProvider)
+  }
+
+  test("JdbcUtils uses the specified connection factory") {
+    val options = new JDBCOptions(Map[String, String](
+      "url" -> "jdbc:mysql://foo.com/bar",
+      "dbtable" -> "table",
+      JDBCOptions.JDBC_CONNECTION_FACTORY_PROVIDER ->
+        "org.apache.spark.sql.execution.datasources.jdbc.TestFactory")
+    )
+    val x = intercept[RuntimeException] {
+      JdbcUtils.createConnectionFactory(options)()
+    }
+    assert(x.getMessage == "This message will be tested in test")
+  }
+
+  test("invalid connection factory throws IllegalArgumentException") {
+
+    val nonexisting = intercept[IllegalArgumentException] {
+      new JDBCOptions(Map[String, String](
+        "url" -> "jdbc:mysql://foo.com/bar",
+        "dbtable" -> "table",
+        JDBCOptions.JDBC_CONNECTION_FACTORY_PROVIDER -> "notexistingclass")
+      )
+    }
+    assert(nonexisting.getMessage == "notexistingclass is not a valid ConnectionFactoryProvider")
+
+    val missingTrait = intercept[IllegalArgumentException] {
+      new JDBCOptions(Map[String, String](
+        "url" -> "jdbc:mysql://foo.com/bar",
+        "dbtable" -> "table",
+        JDBCOptions.JDBC_CONNECTION_FACTORY_PROVIDER ->
+          "org.apache.spark.sql.execution.datasources.jdbc.BadFactory")
+      )
+    }
+    assert(missingTrait.getMessage ==
+      "org.apache.spark.sql.execution.datasources.jdbc.BadFactory" +
+        " is not a valid ConnectionFactoryProvider")
+
+  }
+}
+
+// this one does not implement ConnectionFactoryProvider
+class BadFactory {
+
+}
+
+class TestFactory extends ConnectionFactoryProvider {
+  override def createConnectionFactory(options: JDBCOptions): () => Connection =
--- End diff --
> Without these changes we had to copy most of the spark jdbc package into our own codebase to allow us to create our own connection factory

Trying to understand. So here we are passing JDBCOptions, which is a Spark class. I thought the intention was to make the custom connection factory code not depend on Spark classes?
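
To make the question concrete, here is a rough sketch of what an external provider would look like under the API these tests exercise. Only the trait name, the createConnectionFactory signature, and the JDBC_CONNECTION_FACTORY_PROVIDER option key are taken from the diff above; the class name and body are made up purely for illustration, and the trait is assumed to live in the same jdbc package as JDBCOptions.

```scala
import java.sql.{Connection, DriverManager}

import org.apache.spark.sql.execution.datasources.jdbc.{ConnectionFactoryProvider, JDBCOptions}

// Hypothetical third-party provider: everything in the body is illustrative,
// but note that the class still has to compile against JDBCOptions, a Spark
// class, even though all it pulls out of it here is the connection URL.
class MyPooledConnectionFactoryProvider extends ConnectionFactoryProvider {
  override def createConnectionFactory(options: JDBCOptions): () => Connection = {
    val url = options.url
    // custom wiring (connection pool, credential lookup, etc.) would go here
    () => DriverManager.getConnection(url)
  }
}
```

Such a provider would be registered via JDBCOptions.JDBC_CONNECTION_FACTORY_PROVIDER with its fully qualified class name, as in the second test above, yet the implementation itself still imports and receives JDBCOptions.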
---