This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git
The following commit(s) were added to refs/heads/main by this push:
new d177960 [SPARK-49158] Enforce `ConfusingTernary` and
`PrematureDeclaration` rules
d177960 is described below
commit d1779605edf82d56afb6cae157b6e6fc7a2b23b5
Author: William Hyun <[email protected]>
AuthorDate: Thu Aug 8 00:18:20 2024 -0700
[SPARK-49158] Enforce `ConfusingTernary` and `PrematureDeclaration` rules
### What changes were proposed in this pull request?
This PR aims to enforce the `ConfusingTernary` and `PrematureDeclaration` PMD rules.
### Why are the changes needed?
To ensure future code quality.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #39 from williamhyun/ternary.
Authored-by: William Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
config/pmd/ruleset.xml | 2 --
.../k8s/operator/metrics/source/OperatorJosdkMetrics.java | 12 ++++++------
.../org/apache/spark/k8s/operator/probe/ProbeService.java | 3 +--
.../spark/k8s/operator/reconciler/SparkAppReconciler.java | 3 +--
.../reconciler/observers/AppDriverTimeoutObserver.java | 4 ++--
.../operator/reconciler/reconcilesteps/AppRunningStep.java | 8 ++++----
.../org/apache/spark/k8s/operator/utils/ReconcilerUtils.java | 12 ++++++------
.../org/apache/spark/k8s/operator/utils/StatusRecorder.java | 3 +--
8 files changed, 21 insertions(+), 26 deletions(-)
diff --git a/config/pmd/ruleset.xml b/config/pmd/ruleset.xml
index f4a6c25..6a67e97 100644
--- a/config/pmd/ruleset.xml
+++ b/config/pmd/ruleset.xml
@@ -28,7 +28,6 @@
<rule ref="category/java/codestyle.xml">
<exclude name="AtLeastOneConstructor" />
<exclude name="CommentDefaultAccessModifier" />
- <exclude name="ConfusingTernary" />
<exclude name="FieldDeclarationsShouldBeAtStartOfClass" />
<exclude name="FieldNamingConventions" />
<exclude name="GenericsNaming" />
@@ -37,7 +36,6 @@
<exclude name="LongVariable" />
<exclude name="MethodArgumentCouldBeFinal" />
<exclude name="OnlyOneReturn" />
- <exclude name="PrematureDeclaration" />
<exclude name="ShortVariable" />
<exclude name="TooManyStaticImports" />
<exclude name="UseUnderscoresInNumericLiterals" />
diff --git
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/metrics/source/OperatorJosdkMetrics.java
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/metrics/source/OperatorJosdkMetrics.java
index 6eb8867..530848e 100644
---
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/metrics/source/OperatorJosdkMetrics.java
+++
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/metrics/source/OperatorJosdkMetrics.java
@@ -271,11 +271,11 @@ public class OperatorJosdkMetrics implements Source,
Metrics {
private Histogram getHistogram(Class<?> kclass, String... names) {
String name = MetricRegistry.name(kclass.getSimpleName(),
names).toLowerCase();
Histogram histogram;
- if (!histograms.containsKey(name)) {
+ if (histograms.containsKey(name)) {
+ histogram = histograms.get(name);
+ } else {
histogram = metricRegistry.histogram(name);
histograms.put(name, histogram);
- } else {
- histogram = histograms.get(name);
}
return histogram;
}
@@ -283,11 +283,11 @@ public class OperatorJosdkMetrics implements Source,
Metrics {
private Counter getCounter(Class<?> klass, String... names) {
String name = MetricRegistry.name(klass.getSimpleName(),
names).toLowerCase();
Counter counter;
- if (!counters.containsKey(name)) {
+ if (counters.containsKey(name)) {
+ counter = counters.get(name);
+ } else {
counter = metricRegistry.counter(name);
counters.put(name, counter);
- } else {
- counter = counters.get(name);
}
return counter;
}
diff --git
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/probe/ProbeService.java
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/probe/ProbeService.java
index 1ab82aa..d562e7b 100644
---
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/probe/ProbeService.java
+++
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/probe/ProbeService.java
@@ -41,14 +41,13 @@ public class ProbeService {
public ProbeService(
List<Operator> operators, List<SentinelManager<?>> sentinelManagers,
Executor executor) {
- HealthProbe healthProbe = new HealthProbe(operators, sentinelManagers);
try {
this.server = HttpServer.create(new
InetSocketAddress(OPERATOR_PROBE_PORT.getValue()), 0);
} catch (IOException e) {
throw new IllegalStateException("Failed to create Probe Service Server",
e);
}
server.createContext(READYZ, new ReadinessProbe(operators));
- server.createContext(HEALTHZ, healthProbe);
+ server.createContext(HEALTHZ, new HealthProbe(operators,
sentinelManagers));
server.setExecutor(executor);
}
diff --git
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppReconciler.java
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppReconciler.java
index 0be8a6a..3956935 100644
---
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppReconciler.java
+++
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/SparkAppReconciler.java
@@ -204,7 +204,6 @@ public class SparkAppReconciler
public DeleteControl cleanup(
SparkApplication sparkApplication, Context<SparkApplication> context) {
LoggingUtils.TrackedMDC trackedMDC = new LoggingUtils.TrackedMDC();
- DeleteControl deleteControl = DeleteControl.defaultDelete();
try {
trackedMDC.set(sparkApplication);
log.info("Cleaning up resources for SparkApp.");
@@ -229,6 +228,6 @@ public class SparkAppReconciler
trackedMDC.reset();
}
sparkAppStatusRecorder.removeCachedStatus(sparkApplication);
- return deleteControl;
+ return DeleteControl.defaultDelete();
}
}
diff --git
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/observers/AppDriverTimeoutObserver.java
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/observers/AppDriverTimeoutObserver.java
index a32968a..0dda282 100644
---
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/observers/AppDriverTimeoutObserver.java
+++
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/observers/AppDriverTimeoutObserver.java
@@ -58,8 +58,6 @@ public class AppDriverTimeoutObserver extends
BaseAppDriverObserver {
@Override
public Optional<ApplicationState> observe(
Pod driver, ApplicationSpec spec, ApplicationStatus currentStatus) {
- Instant lastTransitionTime =
- Instant.parse(currentStatus.getCurrentState().getLastTransitionTime());
long timeoutThreshold;
Supplier<ApplicationState> supplier;
ApplicationTimeoutConfig timeoutConfig =
@@ -82,6 +80,8 @@ public class AppDriverTimeoutObserver extends
BaseAppDriverObserver {
// No timeout check needed for other states
return Optional.empty();
}
+ Instant lastTransitionTime =
+ Instant.parse(currentStatus.getCurrentState().getLastTransitionTime());
if (timeoutThreshold > 0L
&&
lastTransitionTime.plusMillis(timeoutThreshold).isBefore(Instant.now())) {
ApplicationState state = supplier.get();
diff --git
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/reconcilesteps/AppRunningStep.java
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/reconcilesteps/AppRunningStep.java
index 0c529a9..6d614ec 100644
---
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/reconcilesteps/AppRunningStep.java
+++
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/reconciler/reconcilesteps/AppRunningStep.java
@@ -76,7 +76,10 @@ public class AppRunningStep extends AppReconcileStep {
}
}
}
- if (!proposedStateSummary.equals(prevStateSummary)) {
+ if (proposedStateSummary.equals(prevStateSummary)) {
+ return observeDriver(
+ context, statusRecorder, Collections.singletonList(new
AppDriverRunningObserver()));
+ } else {
statusRecorder.persistStatus(
context,
context
@@ -84,9 +87,6 @@ public class AppRunningStep extends AppReconcileStep {
.getStatus()
.appendNewState(new ApplicationState(proposedStateSummary,
stateMessage)));
return completeAndDefaultRequeue();
- } else {
- return observeDriver(
- context, statusRecorder, Collections.singletonList(new
AppDriverRunningObserver()));
}
}
}
diff --git
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/ReconcilerUtils.java
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/ReconcilerUtils.java
index ca25833..176bbcc 100644
---
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/ReconcilerUtils.java
+++
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/ReconcilerUtils.java
@@ -87,9 +87,7 @@ public class ReconcilerUtils {
maxAttempts);
}
// retry only on 409 Conflict
- if (e.getCode() != 409) {
- throw e;
- } else {
+ if (e.getCode() == 409) {
if (isConflictForExistingResource(e)) {
current = getResource(client, resource);
if (current.isPresent()) {
@@ -100,6 +98,8 @@ public class ReconcilerUtils {
log.error("Max Retries exceeded while trying to create
resource");
throw e;
}
+ } else {
+ throw e;
}
}
}
@@ -147,10 +147,10 @@ public class ReconcilerUtils {
.delete();
}
} catch (KubernetesClientException e) {
- if (e.getCode() != 404) {
- throw e;
- } else {
+ if (e.getCode() == 404) {
log.info("Pod to delete does not exist, proceeding...");
+ } else {
+ throw e;
}
}
}
diff --git
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/StatusRecorder.java
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/StatusRecorder.java
index 42c07c1..7aa7c42 100644
---
a/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/StatusRecorder.java
+++
b/spark-operator/src/main/java/org/apache/spark/k8s/operator/utils/StatusRecorder.java
@@ -88,8 +88,6 @@ public class StatusRecorder<
return;
}
- STATUS prevStatus = objectMapper.convertValue(previousStatusNode,
statusClass);
-
Exception err = null;
long maxRetry = API_STATUS_PATCH_MAX_ATTEMPTS.getValue();
for (long i = 0; i < maxRetry; i++) {
@@ -110,6 +108,7 @@ public class StatusRecorder<
}
statusCache.put(resourceId, newStatusNode);
+ STATUS prevStatus = objectMapper.convertValue(previousStatusNode,
statusClass);
statusListeners.forEach(
listener -> {
listener.listenStatus(resource, prevStatus, resource.getStatus());
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]