Github user rxin commented on a diff in the pull request:
https://github.com/apache/spark/pull/6747#discussion_r32148778
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala ---
@@ -25,74 +25,283 @@ import scala.collection.JavaConversions._
import org.apache.spark.sql.catalyst.CatalystConf
private[spark] object SQLConf {
- val COMPRESS_CACHED = "spark.sql.inMemoryColumnarStorage.compressed"
- val COLUMN_BATCH_SIZE = "spark.sql.inMemoryColumnarStorage.batchSize"
- val IN_MEMORY_PARTITION_PRUNING =
"spark.sql.inMemoryColumnarStorage.partitionPruning"
- val AUTO_BROADCASTJOIN_THRESHOLD = "spark.sql.autoBroadcastJoinThreshold"
- val DEFAULT_SIZE_IN_BYTES = "spark.sql.defaultSizeInBytes"
- val SHUFFLE_PARTITIONS = "spark.sql.shuffle.partitions"
- val CODEGEN_ENABLED = "spark.sql.codegen"
- val UNSAFE_ENABLED = "spark.sql.unsafe.enabled"
- val DIALECT = "spark.sql.dialect"
- val CASE_SENSITIVE = "spark.sql.caseSensitive"
-
- val PARQUET_BINARY_AS_STRING = "spark.sql.parquet.binaryAsString"
- val PARQUET_INT96_AS_TIMESTAMP = "spark.sql.parquet.int96AsTimestamp"
- val PARQUET_CACHE_METADATA = "spark.sql.parquet.cacheMetadata"
- val PARQUET_COMPRESSION = "spark.sql.parquet.compression.codec"
- val PARQUET_FILTER_PUSHDOWN_ENABLED = "spark.sql.parquet.filterPushdown"
- val PARQUET_USE_DATA_SOURCE_API = "spark.sql.parquet.useDataSourceApi"
-
- val ORC_FILTER_PUSHDOWN_ENABLED = "spark.sql.orc.filterPushdown"
-
- val HIVE_VERIFY_PARTITIONPATH = "spark.sql.hive.verifyPartitionPath"
-
- val COLUMN_NAME_OF_CORRUPT_RECORD = "spark.sql.columnNameOfCorruptRecord"
- val BROADCAST_TIMEOUT = "spark.sql.broadcastTimeout"
+
+ private val sqlConfEntries = java.util.Collections.synchronizedMap(
+ new java.util.HashMap[String, SQLConfEntry[_]]())
+
+ private[sql] class SQLConfEntry[T] private(
+ val key: String,
+ val defaultValue: Option[T],
+ val valueConverter: String => T,
+ val stringConverter: T => String,
+ val doc: String,
+ val isPublic: Boolean) {
+
+ def defaultValueString: String =
defaultValue.map(stringConverter).getOrElse("<undefined>")
+
+ override def toString: String =
+ s"SQLConfEntry(key = $key, defaultValue=$defaultValueString,
doc=$doc, isPublic = $isPublic)"
+ }
+
+ private[sql] object SQLConfEntry {
+
+ private def apply[T](
+ key: String,
+ defaultValue: Option[T],
+ valueConverter: String => T,
+ stringConverter: T => String,
+ doc: String,
+ isPublic: Boolean): SQLConfEntry[T] =
+ sqlConfEntries.synchronized {
+ if (sqlConfEntries.containsKey(key)) {
+ throw new IllegalArgumentException(s"Duplicate SQLConfEntry.
$key has been registered")
+ }
+ val entry =
+ new SQLConfEntry[T](key, defaultValue, valueConverter,
stringConverter, doc, isPublic)
+ sqlConfEntries.put(key, entry)
+ entry
+ }
+
+ def intConf(
--- End diff --
We could also remove `intConf` in favor of having only `longConf`.
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and you wish to enable it, or if the feature is enabled but not
working, please contact infrastructure at [email protected] or
file a JIRA ticket with INFRA.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]