dongjoon-hyun commented on code in PR #12:
URL: 
https://github.com/apache/spark-kubernetes-operator/pull/12#discussion_r1612003267


##########
spark-operator/src/main/java/org/apache/spark/k8s/operator/config/SparkOperatorConf.java:
##########
@@ -0,0 +1,433 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.k8s.operator.config;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import io.javaoperatorsdk.operator.api.config.LeaderElectionConfiguration;
+import io.javaoperatorsdk.operator.processing.event.rate.LinearRateLimiter;
+import io.javaoperatorsdk.operator.processing.event.rate.RateLimiter;
+import io.javaoperatorsdk.operator.processing.retry.GenericRetry;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+
+import org.apache.spark.k8s.operator.listeners.SparkAppStatusListener;
+import org.apache.spark.k8s.operator.reconciler.SparkReconcilerUtils;
+
+/** Spark Operator Configuration options. */
+@Slf4j
+public class SparkOperatorConf {
+  public static final ConfigOption<String> OperatorAppName =
+      ConfigOption.<String>builder()
+          .key("spark.operator.name")
+          .typeParameterClass(String.class)
+          .description("Name of the operator.")
+          .defaultValue("spark-kubernetes-operator")
+          .enableDynamicOverride(false)
+          .build();
+  public static final ConfigOption<String> OperatorNamespace =
+      ConfigOption.<String>builder()
+          .key("spark.operator.namespace")
+          .typeParameterClass(String.class)
+          .description("Namespace that operator is deployed within.")
+          .defaultValue("default")
+          .enableDynamicOverride(false)
+          .build();
+  public static final ConfigOption<Boolean> DynamicConfigEnabled =
+      ConfigOption.<Boolean>builder()
+          .key("spark.operator.dynamic.config.enabled")
+          .typeParameterClass(Boolean.class)
+          .description(
+              "When enabled, operator would use config map as source of truth 
for config "
+                  + "property override. The config map need to be created in "
+                  + "spark.operator.namespace, and labeled with operator 
name.")
+          .defaultValue(false)
+          .enableDynamicOverride(false)
+          .build();
+  public static final ConfigOption<String> DynamicConfigSelectorStr =
+      ConfigOption.<String>builder()
+          .key("spark.operator.dynamic.config.selector.str")
+          .typeParameterClass(String.class)
+          .description("The selector str applied to dynamic config map.")
+          .defaultValue(
+              
SparkReconcilerUtils.labelsAsStr(SparkReconcilerUtils.defaultOperatorConfigLabels()))
+          .enableDynamicOverride(false)
+          .build();
+  public static final ConfigOption<Integer> DynamicConfigReconcilerParallelism 
=
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.dynamic.config.reconciler.parallelism")
+          .description("Parallelism for dynamic config reconciler. Use -1 for 
unbounded pool.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(1)
+          .build();
+  public static final ConfigOption<Boolean> TerminateOnInformerFailure =
+      ConfigOption.<Boolean>builder()
+          .key("spark.operator.terminate.on.informer.failure")
+          .typeParameterClass(Boolean.class)
+          .description(
+              "Enable to indicate informer errors should stop operator 
startup. If "
+                  + "disabled, operator startup will ignore recoverable 
errors, "
+                  + "caused for example by RBAC issues and will retry "
+                  + "periodically.")
+          .defaultValue(false)
+          .enableDynamicOverride(false)
+          .build();
+  public static final ConfigOption<Integer> TerminationTimeoutSeconds =
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.termination.timeout.seconds")
+          .description(
+              "Grace period for operator shutdown before reconciliation 
threads are killed.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(30)
+          .build();
+  public static final ConfigOption<Integer> ReconcilerParallelism =
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.reconciler.parallelism")
+          .description(
+              "Thread pool size for Spark Operator reconcilers. Use -1 for 
unbounded pool.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(30)
+          .build();
+  public static final ConfigOption<Integer> RateLimiterRefreshPeriodSeconds =
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.rate.limiter.refresh.period.seconds")
+          .description("Operator rate limiter refresh period(in seconds) for 
each resource.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(15)
+          .build();
+  public static final ConfigOption<Integer> RateLimiterLimit =
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.rate.limiter.limit")
+          .description(
+              "Max number of reconcile loops triggered within the rate limiter 
refresh "
+                  + "period for each resource. Setting the limit <= 0 disables 
the "
+                  + "limiter.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(5)
+          .build();
+  public static final ConfigOption<Integer> RetryInitialInternalSeconds =
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.retry.initial.internal.seconds")
+          .description("Initial interval(in seconds) of retries on unhandled 
controller errors.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(5)
+          .build();
+  public static final ConfigOption<Double> RetryInternalMultiplier =
+      ConfigOption.<Double>builder()
+          .key("spark.operator.retry.internal.multiplier")
+          .description("Interval multiplier of retries on unhandled controller 
errors.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Double.class)
+          .defaultValue(1.5)
+          .build();
+  public static final ConfigOption<Integer> RetryMaxIntervalSeconds =
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.retry.max.interval.seconds")
+          .description(
+              "Max interval(in seconds) of retries on unhandled controller 
errors. "
+                  + "Set to -1 for unlimited.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(-1)
+          .build();
+  public static final ConfigOption<Integer> RetryMaxAttempts =
+      ConfigOption.<Integer>builder()
+          .key("spark.operator.retry.max.attempts")
+          .description("Max attempts of retries on unhandled controller 
errors.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(15)
+          .build();
+  public static final ConfigOption<Long> SecondaryResourceCreateMaxAttempts =
+      ConfigOption.<Long>builder()
+          .key("spark.operator.secondary.resource.create.max.attempts")
+          .description(
+              "Maximal number of retry attempts of requesting secondary 
resource for Spark "
+                  + "application.")
+          .defaultValue(3L)
+          .typeParameterClass(Long.class)
+          .build();
+  public static final ConfigOption<Long> MaxRetryAttemptOnKubeServerFailure =
+      ConfigOption.<Long>builder()
+          .key("spark.operator.max.retry.attempts.on.k8s.failure")

Review Comment:
   This config key's name looks a little too broad. The variable name 
(`MaxRetryAttemptOnKubeServerFailure`) conveys the intent better than the key 
`spark.operator.max.retry.attempts.on.k8s.failure` does.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to