Github user mallman commented on a diff in the pull request:

    https://github.com/apache/spark/pull/17633#discussion_r117834494
  
    --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala ---
    @@ -17,45 +17,656 @@
     
     package org.apache.spark.sql.hive.client
     
    +import java.io.{ByteArrayOutputStream, File, PrintStream}
    +import java.net.URI
    +
     import org.apache.hadoop.conf.Configuration
    -import org.apache.hadoop.hive.conf.HiveConf
    +import org.apache.hadoop.hive.common.StatsSetupConst
    +import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    +import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
    +import org.apache.hadoop.mapred.TextInputFormat
     
     import org.apache.spark.SparkFunSuite
    +import org.apache.spark.internal.Logging
    +import org.apache.spark.sql.{AnalysisException, Row}
    +import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
    +import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPermanentFunctionException}
     import org.apache.spark.sql.catalyst.catalog._
     import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal}
    -import org.apache.spark.sql.hive.HiveUtils
    +import org.apache.spark.sql.catalyst.util.quietly
    +import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
    +import org.apache.spark.sql.hive.test.TestHiveVersion
     import org.apache.spark.sql.types.IntegerType
    +import org.apache.spark.sql.types.StructType
    +import org.apache.spark.util.{MutableURLClassLoader, Utils}
    +
    +/**
    + * A simple set of tests that call the methods of a [[HiveClient]], loading different versions
    + * of Hive from Maven Central. These tests are simple in that they mostly just check that
    + * reflective calls do not throw a NoSuchMethodError; the actual functionality is not fully
    + * tested.
    + */
    +class HiveClientSuite(version: String) extends HiveVersionSuite(version) with HiveClientVersions {
    +  /**
    +   * Creates a temporary directory, which is then passed to `f` and will be deleted after `f`
    +   * returns.
    +   */
    +  protected def withTempDir(f: File => Unit): Unit = {
    +    val dir = Utils.createTempDir().getCanonicalFile
    +    try f(dir) finally Utils.deleteRecursively(dir)
    +  }
    +
    +  /**
    +   * Drops tables `tableNames` after calling `f`.
    +   */
    +  protected def withTable(tableNames: String*)(f: => Unit): Unit = {
    +    try f finally {
    +      tableNames.foreach { name =>
    +        versionSpark.sql(s"DROP TABLE IF EXISTS $name")
    +      }
    +    }
    +  }
    +
    +  private def getNestedMessages(e: Throwable): String = {
    +    var causes = ""
    +    var lastException = e
    +    while (lastException != null) {
    +      causes += lastException.toString + "\n"
    +      lastException = lastException.getCause
    +    }
    +    causes
    +  }
     
    -class HiveClientSuite extends SparkFunSuite {
    -  private val clientBuilder = new HiveClientBuilder
    +  private val emptyDir = Utils.createTempDir().getCanonicalPath
     
    -  private val tryDirectSqlKey = 
HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL.varname
    +  private var versionSpark: TestHiveVersion = null
     
    -  test(s"getPartitionsByFilter returns all partitions when 
$tryDirectSqlKey=false") {
    -    val testPartitionCount = 5
    +  test("success sanity check") {
    +    val client = buildClient(new Configuration())
    +    val db = new CatalogDatabase("default", "desc", new URI("loc"), Map())
    +    client.createDatabase(db, ignoreIfExists = true)
    +  }
     
    -    val storageFormat = CatalogStorageFormat(
    -      locationUri = None,
    -      inputFormat = None,
    -      outputFormat = None,
    -      serde = None,
    -      compressed = false,
    -      properties = Map.empty)
    +  test("hadoop configuration preserved") {
    +    val hadoopConf = new Configuration()
    +    hadoopConf.set("test", "success")
    +    val client = buildClient(hadoopConf)
    +    assert("success" === client.getConf("test", null))
    +  }
     
    +  // Should this be in a beforeAll() method instead?
    --- End diff ---
    
    Should this be in a `beforeAll()` method instead? This code actually initializes `client` for use by the other tests, and putting that shared setup in a `beforeAll()` method feels more appropriate.
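    
    For illustration, here is a rough sketch of what that could look like (this assumes `HiveVersionSuite` ultimately mixes in ScalaTest's `BeforeAndAfterAll`, so `beforeAll()` can be overridden, and it reuses the suite's existing `buildClient` helper):
    
    ```scala
    import org.apache.hadoop.conf.Configuration
    
    // Sketch only: assumes HiveVersionSuite mixes in ScalaTest's BeforeAndAfterAll.
    class HiveClientSuite(version: String) extends HiveVersionSuite(version) with HiveClientVersions {
      private var client: HiveClient = null
    
      override def beforeAll(): Unit = {
        super.beforeAll()
        // Build the versioned client once, before any test runs, rather than
        // initializing it inside whichever test happens to execute first.
        client = buildClient(new Configuration())
      }
    
      // ... tests then use `client` without initializing it themselves ...
    }
    ```
    
    That would keep the shared setup in one obvious place and remove the ordering dependency between tests.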

