Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/6747#discussion_r32296066
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala ---
    @@ -25,74 +25,314 @@ import scala.collection.JavaConversions._
     import org.apache.spark.sql.catalyst.CatalystConf
     
     private[spark] object SQLConf {
    -  val COMPRESS_CACHED = "spark.sql.inMemoryColumnarStorage.compressed"
    -  val COLUMN_BATCH_SIZE = "spark.sql.inMemoryColumnarStorage.batchSize"
    -  val IN_MEMORY_PARTITION_PRUNING = "spark.sql.inMemoryColumnarStorage.partitionPruning"
    -  val AUTO_BROADCASTJOIN_THRESHOLD = "spark.sql.autoBroadcastJoinThreshold"
    -  val DEFAULT_SIZE_IN_BYTES = "spark.sql.defaultSizeInBytes"
    -  val SHUFFLE_PARTITIONS = "spark.sql.shuffle.partitions"
    -  val CODEGEN_ENABLED = "spark.sql.codegen"
    -  val UNSAFE_ENABLED = "spark.sql.unsafe.enabled"
    -  val DIALECT = "spark.sql.dialect"
    -  val CASE_SENSITIVE = "spark.sql.caseSensitive"
    -
    -  val PARQUET_BINARY_AS_STRING = "spark.sql.parquet.binaryAsString"
    -  val PARQUET_INT96_AS_TIMESTAMP = "spark.sql.parquet.int96AsTimestamp"
    -  val PARQUET_CACHE_METADATA = "spark.sql.parquet.cacheMetadata"
    -  val PARQUET_COMPRESSION = "spark.sql.parquet.compression.codec"
    -  val PARQUET_FILTER_PUSHDOWN_ENABLED = "spark.sql.parquet.filterPushdown"
    -  val PARQUET_USE_DATA_SOURCE_API = "spark.sql.parquet.useDataSourceApi"
    -
    -  val ORC_FILTER_PUSHDOWN_ENABLED = "spark.sql.orc.filterPushdown"
    -
    -  val HIVE_VERIFY_PARTITIONPATH = "spark.sql.hive.verifyPartitionPath"
    -
    -  val COLUMN_NAME_OF_CORRUPT_RECORD = "spark.sql.columnNameOfCorruptRecord"
    -  val BROADCAST_TIMEOUT = "spark.sql.broadcastTimeout"
    +
    +  private val sqlConfEntries = java.util.Collections.synchronizedMap(
    +    new java.util.HashMap[String, SQLConfEntry[_]]())
    +
    +  /**
    +   * An entry contains all meta information for a configuration.
    +   *
    +   * @param key the key for the configuration
    +   * @param defaultValue the default value for the configuration
    +   * @param valueConverter how to convert a string to the value. It should throw an exception if the
    +   *                       string does not have the required format.
    +   * @param stringConverter how to convert a value to a string that the user can use it as a valid
    +   *                        string value. It's usually `toString`. But sometimes, a custom converter
    +   *                        is necessary. E.g., if T is List[String], `a, b, c` is better than
    +   *                        `List(a, b, c)`.
    +   * @param doc the document for the configuration
    +   * @param isPublic if this configuration is public to the user. If it's `false`, this
    +   *                 configuration is only used internally and we should not expose it to the user.
    +   * @tparam T the value type
    +   */
    +  private[sql] class SQLConfEntry[T] private(
    +      val key: String,
    +      val defaultValue: Option[T],
    +      val valueConverter: String => T,
    +      val stringConverter: T => String,
    +      val doc: String,
    +      val isPublic: Boolean) {
    +
    +    def defaultValueString: String = defaultValue.map(stringConverter).getOrElse("<undefined>")
    +
    +    override def toString: String = {
    +      s"SQLConfEntry(key = $key, defaultValue=$defaultValueString, doc=$doc, isPublic = $isPublic)"
    +    }
    +  }
    +
    +  private[sql] object SQLConfEntry {
    +
    +    private def apply[T](
    +          key: String,
    +          defaultValue: Option[T],
    +          valueConverter: String => T,
    +          stringConverter: T => String,
    +          doc: String,
    +          isPublic: Boolean): SQLConfEntry[T] =
    +      sqlConfEntries.synchronized {
    --- End diff --
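
    A side note on the new pattern above, restated as a minimal sketch (the names `ConfEntrySketch`, `ConfEntry`, and the example key are made up for illustration and are not the PR's API): each entry pairs a key and an optional default with a parser from string to value and a renderer back to string, so a malformed setting fails fast with a clear message.

        // Illustrative sketch only; not the code under review.
        object ConfEntrySketch {

          class ConfEntry[T](
              val key: String,
              val defaultValue: Option[T],
              val valueConverter: String => T,   // parses the user-supplied string, throws on bad input
              val stringConverter: T => String,  // renders the value back as a settable string
              val doc: String,
              val isPublic: Boolean) {

            def defaultValueString: String =
              defaultValue.map(stringConverter).getOrElse("<undefined>")
          }

          // An Int-valued entry: the converter fails loudly on a malformed string.
          val exampleIntEntry = new ConfEntry[Int](
            key = "example.shuffle.partitions",
            defaultValue = Some(200),
            valueConverter = s => try {
              s.toInt
            } catch {
              case _: NumberFormatException =>
                throw new IllegalArgumentException(s"'$s' is not an int")
            },
            stringConverter = _.toString,
            doc = "Hypothetical doc text for the example entry.",
            isPublic = true)
        }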
    
    Is `sqlConfEntries` already a synchronized map?
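
    To make the question concrete (an illustrative sketch; `SyncMapSketch`, `entries`, and `register` are made-up names, not the PR's code): `Collections.synchronizedMap` locks each individual call on the wrapper object itself, so the remaining question is whether the extra `synchronized` block buys anything beyond that, e.g. making a check-then-put sequence atomic by holding the same monitor across both calls.

        // Illustrative sketch only; not the code under review.
        import java.util.Collections

        object SyncMapSketch {
          // Every single call (get, put, containsKey, ...) on this map is already
          // wrapped in a lock on the wrapper object itself.
          private val entries: java.util.Map[String, String] =
            Collections.synchronizedMap(new java.util.HashMap[String, String]())

          // A containsKey-then-put pair is two separate calls; holding the wrapper's
          // monitor across both of them is what would make the pair atomic.
          def register(key: String, value: String): Unit = entries.synchronized {
            require(!entries.containsKey(key), s"duplicate entry: $key")
            entries.put(key, value)
          }
        }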

