This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git

commit dd97bcbda5be70ff6d3e7192bb7782092e80483c
Author: beliefer <[email protected]>
AuthorDate: Fri Mar 6 11:08:57 2020 +0900

    [SPARK-30914][CORE][DOC] Add version information to the configuration of UI
    
    ### What changes were proposed in this pull request?
    1. Add version information to the configuration of `UI`.
    2. Update the docs of `UI`.
    
    I sorted out the information shown in the table below; a short sketch of the resulting pattern follows the table.
    
    Item name | Since version | JIRA ID | Commit ID | Note
    -- | -- | -- | -- | --
    spark.ui.showConsoleProgress | 1.2.1 | SPARK-4017 | 04b1bdbae31c3039125100e703121daf7d9dabf5#diff-364713d7776956cb8b0a771e9b62f82d |
    spark.ui.consoleProgress.update.interval | 2.1.0 | SPARK-16919 | e076fb05ac83a3ed6995e29bb03ea07ea05e39db#diff-fbf4e388a66b6a37e984b91cd71a3e2c |
    spark.ui.enabled | 1.1.1 | SPARK-3490 | 937de93e80e6d299c4d08be426da2d5bc2d66f98#diff-364713d7776956cb8b0a771e9b62f82d |
    spark.ui.port | 0.7.0 | None | f03d9760fd8ac67fd0865cb355ba75d2eff507fe#diff-ed8dbcebe16fda5ecd6df1a981dc6fee |
    spark.ui.filters | 1.0.0 | SPARK-1189 | 7edbea41b43e0dc11a2de156be220db8b7952d01#diff-f79a5ead735b3d0b34b6b94486918e1c |
    spark.ui.allowFramingFrom | 1.6.0 | SPARK-10589 | 5dbaf3d3911bbfa003bc75459aaad66b4f6e0c67#diff-f79a5ead735b3d0b34b6b94486918e1c |
    spark.ui.reverseProxy | 2.1.0 | SPARK-15487 | 92ce8d4849a0341c4636e70821b7be57ad3055b1#diff-364713d7776956cb8b0a771e9b62f82d |
    spark.ui.reverseProxyUrl | 2.1.0 | SPARK-15487 | 92ce8d4849a0341c4636e70821b7be57ad3055b1#diff-364713d7776956cb8b0a771e9b62f82d |
    spark.ui.killEnabled | 1.0.0 | SPARK-1202 | 211f97447b5f078afcb1619a08d2e2349325f61a#diff-a40023c80383451b6e29ee7a6e0593e9 |
    spark.ui.threadDumpsEnabled | 1.2.0 | SPARK-611 | 866c7bbe56f9c7fd96d3f4afe8a76405dc877a6e#diff-5d18fb70c572369a0fff0b97de94f265 |
    spark.ui.prometheus.enabled | 3.0.0 | SPARK-29064 | bbfaadb280a80b511a98d18881641c6d9851dd51#diff-f70174ad0759db1fb4cb36a7ff9324a7 |
    spark.ui.xXssProtection | 2.3.0 | SPARK-22188 | 5a07aca4d464e96d75ea17bf6768e24b829872ec#diff-6bdad48cfc34314e89599655442ff210 |
    spark.ui.xContentTypeOptions.enabled | 2.3.0 | SPARK-22188 | 5a07aca4d464e96d75ea17bf6768e24b829872ec#diff-6bdad48cfc34314e89599655442ff210 |
    spark.ui.strictTransportSecurity | 2.3.0 | SPARK-22188 | 5a07aca4d464e96d75ea17bf6768e24b829872ec#diff-6bdad48cfc34314e89599655442ff210 |
    spark.ui.requestHeaderSize | 2.2.3 | SPARK-26118 | 9ceee6f188e6c3794d31ce15cc61d29f907bebf7#diff-6bdad48cfc34314e89599655442ff210 |
    spark.ui.timeline.tasks.maximum | 1.4.0 | SPARK-7296 | a5f7b3b9c7f05598a1cc8e582e5facee1029cd5e#diff-fa4cfb2cce1b925f55f41f2dfa8c8501 |
    spark.acls.enable | 1.1.0 | SPARK-1890 and SPARK-1891 | e3fe6571decfdc406ec6d505fd92f9f2b85a618c#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.ui.view.acls | 1.0.0 | SPARK-1189 | 7edbea41b43e0dc11a2de156be220db8b7952d01#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.ui.view.acls.groups | 2.0.0 | SPARK-4224 | ae79032dcf160796851ca29116cca146c4d86ada#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.admin.acls | 1.1.0 | SPARK-1890 and SPARK-1891 | e3fe6571decfdc406ec6d505fd92f9f2b85a618c#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.admin.acls.groups | 2.0.0 | SPARK-4224 | ae79032dcf160796851ca29116cca146c4d86ada#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.modify.acls | 1.1.0 | SPARK-1890 and SPARK-1891 | e3fe6571decfdc406ec6d505fd92f9f2b85a618c#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.modify.acls.groups | 2.0.0 | SPARK-4224 | ae79032dcf160796851ca29116cca146c4d86ada#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.user.groups.mapping | 2.0.0 | SPARK-4224 | ae79032dcf160796851ca29116cca146c4d86ada#diff-afd88f677ec5ff8b5e96a5cbbe00cd98 |
    spark.ui.proxyRedirectUri | 3.0.0 | SPARK-30240 | a9fbd310300e57ed58818d7347f3c3172701c491#diff-f70174ad0759db1fb4cb36a7ff9324a7 |
    spark.ui.custom.executor.log.url | 3.0.0 | SPARK-26792 | d5bda2c9e8dde6afc075cc7f65b15fa9aa82231c#diff-f70174ad0759db1fb4cb36a7ff9324a7 |
    
    ### Why are the changes needed?
    Supplements the configuration of `UI` with version information.
    
    ### Does this PR introduce any user-facing change?
    No
    
    ### How was this patch tested?
    Existing unit tests.
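    
    For reference, a minimal spot-check of the recorded versions could look like the sketch below (assumptions: spark-internal access to `org.apache.spark.internal.config.UI` and the `version` field exposed by config entries; illustrative only, not part of this patch):
    
    ```scala
    package org.apache.spark.internal.config
    
    // Illustrative spot-check only (not an actual test in this patch):
    // reads back the version string set via ConfigBuilder.version(...).
    object UIVersionSpotCheck {
      def main(args: Array[String]): Unit = {
        assert(UI.UI_PORT.version == "0.7.0")
        assert(UI.UI_SHOW_CONSOLE_PROGRESS.version == "1.2.1")
        println("UI config version metadata looks consistent.")
      }
    }
    ```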
    
    Closes #27806 from beliefer/add-version-to-UI-config.
    
    Authored-by: beliefer <[email protected]>
    Signed-off-by: HyukjinKwon <[email protected]>
---
 .../org/apache/spark/internal/config/UI.scala      | 26 ++++++++++++++++++++++
 docs/configuration.md                              | 10 +++++++++
 2 files changed, 36 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/internal/config/UI.scala b/core/src/main/scala/org/apache/spark/internal/config/UI.scala
index 60d9857..231eecf 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/UI.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/UI.scala
@@ -25,31 +25,37 @@ private[spark] object UI {
 
   val UI_SHOW_CONSOLE_PROGRESS = ConfigBuilder("spark.ui.showConsoleProgress")
     .doc("When true, show the progress bar in the console.")
+    .version("1.2.1")
     .booleanConf
     .createWithDefault(false)
 
   val UI_CONSOLE_PROGRESS_UPDATE_INTERVAL =
     ConfigBuilder("spark.ui.consoleProgress.update.interval")
+      .version("2.1.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(200)
 
   val UI_ENABLED = ConfigBuilder("spark.ui.enabled")
     .doc("Whether to run the web UI for the Spark application.")
+    .version("1.1.1")
     .booleanConf
     .createWithDefault(true)
 
   val UI_PORT = ConfigBuilder("spark.ui.port")
     .doc("Port for your application's dashboard, which shows memory and 
workload data.")
+    .version("0.7.0")
     .intConf
     .createWithDefault(4040)
 
   val UI_FILTERS = ConfigBuilder("spark.ui.filters")
     .doc("Comma separated list of filter class names to apply to the Spark Web 
UI.")
+    .version("1.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   val UI_ALLOW_FRAMING_FROM = ConfigBuilder("spark.ui.allowFramingFrom")
+    .version("1.6.0")
     .stringConf
     .createOptional
 
@@ -61,6 +67,7 @@ private[spark] object UI {
       "through spark master/proxy public URL. This setting affects all the 
workers and " +
       "application UIs running in the cluster and must be set on all the 
workers, drivers " +
       " and masters.")
+    .version("2.1.0")
     .booleanConf
     .createWithDefault(false)
 
@@ -69,15 +76,18 @@ private[spark] object UI {
       "in front of Spark Master. This is useful when running proxy for 
authentication e.g. " +
       "OAuth proxy. Make sure this is a complete URL including scheme 
(http/https) and port to " +
       "reach your proxy.")
+    .version("2.1.0")
     .stringConf
     .createOptional
 
   val UI_KILL_ENABLED = ConfigBuilder("spark.ui.killEnabled")
     .doc("Allows jobs and stages to be killed from the web UI.")
+    .version("1.0.0")
     .booleanConf
     .createWithDefault(true)
 
   val UI_THREAD_DUMPS_ENABLED = ConfigBuilder("spark.ui.threadDumpsEnabled")
+    .version("1.2.0")
     .booleanConf
     .createWithDefault(true)
 
@@ -85,73 +95,88 @@ private[spark] object UI {
     .internal()
     .doc("Expose executor metrics at /metrics/executors/prometheus. " +
       "For master/worker/driver metrics, you need to configure 
`conf/metrics.properties`.")
+    .version("3.0.0")
     .booleanConf
     .createWithDefault(false)
 
   val UI_X_XSS_PROTECTION = ConfigBuilder("spark.ui.xXssProtection")
     .doc("Value for HTTP X-XSS-Protection response header")
+    .version("2.3.0")
     .stringConf
     .createWithDefaultString("1; mode=block")
 
  val UI_X_CONTENT_TYPE_OPTIONS = ConfigBuilder("spark.ui.xContentTypeOptions.enabled")
    .doc("Set to 'true' for setting X-Content-Type-Options HTTP response header to 'nosniff'")
+    .version("2.3.0")
     .booleanConf
     .createWithDefault(true)
 
  val UI_STRICT_TRANSPORT_SECURITY = ConfigBuilder("spark.ui.strictTransportSecurity")
     .doc("Value for HTTP Strict Transport Security Response Header")
+    .version("2.3.0")
     .stringConf
     .createOptional
 
   val UI_REQUEST_HEADER_SIZE = ConfigBuilder("spark.ui.requestHeaderSize")
     .doc("Value for HTTP request header size in bytes.")
+    .version("2.2.3")
     .bytesConf(ByteUnit.BYTE)
     .createWithDefaultString("8k")
 
  val UI_TIMELINE_TASKS_MAXIMUM = ConfigBuilder("spark.ui.timeline.tasks.maximum")
+    .version("1.4.0")
     .intConf
     .createWithDefault(1000)
 
   val ACLS_ENABLE = ConfigBuilder("spark.acls.enable")
+    .version("1.1.0")
     .booleanConf
     .createWithDefault(false)
 
   val UI_VIEW_ACLS = ConfigBuilder("spark.ui.view.acls")
+    .version("1.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   val UI_VIEW_ACLS_GROUPS = ConfigBuilder("spark.ui.view.acls.groups")
+    .version("2.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   val ADMIN_ACLS = ConfigBuilder("spark.admin.acls")
+    .version("1.1.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   val ADMIN_ACLS_GROUPS = ConfigBuilder("spark.admin.acls.groups")
+    .version("2.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   val MODIFY_ACLS = ConfigBuilder("spark.modify.acls")
+    .version("1.1.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   val MODIFY_ACLS_GROUPS = ConfigBuilder("spark.modify.acls.groups")
+    .version("2.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   val USER_GROUPS_MAPPING = ConfigBuilder("spark.user.groups.mapping")
+    .version("2.0.0")
    .stringConf
    .createWithDefault("org.apache.spark.security.ShellBasedGroupsMappingProvider")
 
   val PROXY_REDIRECT_URI = ConfigBuilder("spark.ui.proxyRedirectUri")
     .doc("Proxy address to use when responding with HTTP redirects.")
+    .version("3.0.0")
     .stringConf
     .createOptional
 
@@ -163,6 +188,7 @@ private[spark] object UI {
       "This configuration replaces original log urls in event log, which will 
be also effective " +
       "when accessing the application on history server. The new log urls must 
be permanent, " +
       "otherwise you might have dead link for executor log urls.")
+    .version("3.0.0")
     .stringConf
     .createOptional
 }
diff --git a/docs/configuration.md b/docs/configuration.md
index c586ecf..9cbe341 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -1031,6 +1031,7 @@ Apart from these, the following properties are also available, and may be useful
   <td>
     Whether to run the web UI for the Spark application.
   </td>
+  <td>1.1.1</td>
 </tr>
 <tr>
   <td><code>spark.ui.killEnabled</code></td>
@@ -1038,6 +1039,7 @@ Apart from these, the following properties are also available, and may be useful
   <td>
     Allows jobs and stages to be killed from the web UI.
   </td>
+  <td>1.0.0</td>
 </tr>
 <tr>
   <td><code>spark.ui.liveUpdate.period</code></td>
@@ -1062,6 +1064,7 @@ Apart from these, the following properties are also available, and may be useful
   <td>
    Port for your application's dashboard, which shows memory and workload data.
   </td>
+  <td>0.7.0</td>
 </tr>
 <tr>
   <td><code>spark.ui.retainedJobs</code></td>
@@ -1093,6 +1096,7 @@ Apart from these, the following properties are also available, and may be useful
   <td>
    Enable running Spark Master as reverse proxy for worker and application UIs. In this mode, Spark master will reverse proxy the worker and application UIs to enable access without requiring direct access to their hosts. Use it with caution, as worker and application UI will not be accessible directly, you will only be able to access them through spark master/proxy public URL. This setting affects all the workers and application UIs running in the cluster and must be set on all the wor [...]
   </td>
+  <td>2.1.0</td>
 </tr>
 <tr>
   <td><code>spark.ui.reverseProxyUrl</code></td>
@@ -1100,6 +1104,7 @@ Apart from these, the following properties are also available, and may be useful
   <td>
    This is the URL where your proxy is running. This URL is for proxy which is running in front of Spark Master. This is useful when running proxy for authentication e.g. OAuth proxy. Make sure this is a complete URL including scheme (http/https) and port to reach your proxy.
   </td>
+  <td>2.1.0</td>
 </tr>
 <tr>
   <td><code>spark.ui.proxyRedirectUri</code></td>
@@ -1112,6 +1117,7 @@ Apart from these, the following properties are also available, and may be useful
    <code>X-Forwarded-Context</code> request header), or by setting the proxy base in the Spark
     app's configuration.
   </td>
+  <td>3.0.0</td>
 </tr>
 <tr>
   <td><code>spark.ui.showConsoleProgress</code></td>
@@ -1123,6 +1129,7 @@ Apart from these, the following properties are also available, and may be useful
     <br/>
    <em>Note:</em> In shell environment, the default value of spark.ui.showConsoleProgress is true.
   </td>
+  <td>1.2.1</td>
 </tr>
 <tr>
   <td><code>spark.ui.custom.executor.log.url</code></td>
@@ -1138,6 +1145,7 @@ Apart from these, the following properties are also available, and may be useful
     <p/>
     For now, only YARN mode supports this configuration
   </td>
+  <td>3.0.0</td>
 </tr>
 <tr>
   <td><code>spark.worker.ui.retainedExecutors</code></td>
@@ -1190,6 +1198,7 @@ Apart from these, the following properties are also available, and may be useful
     <br /><code>spark.com.test.filter1.param.name1=foo</code>
     <br /><code>spark.com.test.filter1.param.name2=bar</code>
   </td>
+  <td>1.0.0</td>
 </tr>
 <tr>
   <td><code>spark.ui.requestHeaderSize</code></td>
@@ -1198,6 +1207,7 @@ Apart from these, the following properties are also available, and may be useful
    The maximum allowed size for a HTTP request header, in bytes unless otherwise specified.
     This setting applies for the Spark History Server too.
   </td>
+  <td>2.2.3</td>
 </tr>
 </table>
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
