This is an automated email from the ASF dual-hosted git repository.
aajisaka pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new 1f2ccc7acfd HADOOP-19785. mvn site fails in JDK17 (#8182)
1f2ccc7acfd is described below
commit 1f2ccc7acfdbf47345ffb327807617481ec4e10e
Author: Akira Ajisaka <[email protected]>
AuthorDate: Tue Feb 3 10:42:35 2026 +0900
HADOOP-19785. mvn site fails in JDK17 (#8182)
- Upgrade Maven Javadoc plugin
- Add hadoop-hdfs test jar dependency because hadoop-dynamometer depends on it
- Update Hadoop's custom doclet to support JDK 17/21/25
- Fix Javadoc errors
- Use --source 17 and --target 17 instead of --release 17 to access JDK internal APIs in the hadoop-annotations module
- Update checkstyle plugin version to support JDK17 idioms
Signed-off-by: Chris Nauroth <[email protected]>
Reviewed-by: Cheng Pan <[email protected]>
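
For context on the doclet changes in the diff below: the reworked RootDocProcessor no longer wraps the DocletEnvironment in dynamic proxies; process() now returns a HadoopDocEnvImpl that filters Private/LimitedPrivate elements directly. A minimal sketch of how a StandardDoclet subclass might plug that in is shown here; the doclet class name is illustrative only, not necessarily the one shipped in hadoop-annotations:

    package org.apache.hadoop.classification.tools;

    import jdk.javadoc.doclet.DocletEnvironment;
    import jdk.javadoc.doclet.StandardDoclet;

    // Hypothetical doclet sketch; only RootDocProcessor.process() comes from this patch.
    public class FilteringStandardDoclet extends StandardDoclet {
      @Override
      public boolean run(DocletEnvironment env) {
        // Swap in the filtered environment before the standard doclet generates docs.
        return super.run(RootDocProcessor.process(env));
      }
    }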
---
hadoop-common-project/hadoop-annotations/pom.xml | 8 +-
.../classification/tools/HadoopDocEnvImpl.java | 173 +++++++++++
.../classification/tools/RootDocProcessor.java | 319 +--------------------
.../classification/tools/StabilityOptions.java | 2 +-
.../manifest/ManifestCommitterConstants.java | 3 +-
.../manifest/impl/ManifestStoreOperations.java | 2 +-
hadoop-project/pom.xml | 19 +-
.../fs/azurebfs/utils/TracingHeaderVersion.java | 9 +
.../controller/AppDetailsController.java | 27 +-
.../appcatalog/controller/AppListController.java | 17 +-
.../appcatalog/controller/AppStoreController.java | 17 +-
.../hadoop/yarn/service/api/records/Component.java | 2 -
pom.xml | 31 +-
13 files changed, 274 insertions(+), 355 deletions(-)
diff --git a/hadoop-common-project/hadoop-annotations/pom.xml
b/hadoop-common-project/hadoop-annotations/pom.xml
index 86bc5d92379..599ca9893cc 100644
--- a/hadoop-common-project/hadoop-annotations/pom.xml
+++ b/hadoop-common-project/hadoop-annotations/pom.xml
@@ -42,10 +42,16 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
- <configuration>
+ <!-- override the parent config to use source and target
+ instead of release so that the JDK internal API can be accessed -->
+ <configuration combine.self="override">
+ <source>${javac.version}</source>
+ <target>${javac.version}</target>
<compilerArgs>
<arg>--add-modules</arg>
<arg>jdk.javadoc</arg>
+ <arg>--add-exports</arg>
+ <arg>jdk.javadoc/jdk.javadoc.internal.tool=ALL-UNNAMED</arg>
</compilerArgs>
</configuration>
</plugin>
diff --git
a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/HadoopDocEnvImpl.java
b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/HadoopDocEnvImpl.java
new file mode 100644
index 00000000000..e580cef5612
--- /dev/null
+++
b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/HadoopDocEnvImpl.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.classification.tools;
+
+import com.sun.source.util.DocTrees;
+import jdk.javadoc.doclet.DocletEnvironment;
+import jdk.javadoc.internal.tool.DocEnvImpl;
+import jdk.javadoc.internal.tool.ToolEnvironment;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import javax.lang.model.SourceVersion;
+import javax.lang.model.element.AnnotationMirror;
+import javax.lang.model.element.Element;
+import javax.lang.model.element.ElementKind;
+import javax.lang.model.element.TypeElement;
+import javax.lang.model.util.Elements;
+import javax.lang.model.util.Types;
+import javax.tools.JavaFileManager;
+import javax.tools.JavaFileObject;
+import java.util.LinkedHashSet;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This class extends the JDK-internal DocEnvImpl to avoid a cast error while
+ * migrating to JDK 17. It filters elements based on Hadoop's InterfaceAudience
+ * and InterfaceStability annotations.
+ * This class depends on JDK internal implementation details, so the source
+ * may need to be updated when upgrading to later JDK versions.
+ */
+public class HadoopDocEnvImpl extends DocEnvImpl {
+
+ private final DocletEnvironment delegate;
+ private final String stability;
+ private final boolean treatUnannotatedClassesAsPrivate;
+
+ public HadoopDocEnvImpl(DocletEnvironment original, String stability,
+ boolean treatUnannotatedClassesAsPrivate) {
+ super(extractToolEnvironment(original), null);
+ this.delegate = original;
+ this.stability = stability;
+ this.treatUnannotatedClassesAsPrivate = treatUnannotatedClassesAsPrivate;
+ }
+
+ // Use original ToolEnvironment to avoid NullPointerException
+ private static ToolEnvironment extractToolEnvironment(DocletEnvironment original) {
+ if (original instanceof DocEnvImpl impl) {
+ return impl.toolEnv;
+ }
+ throw new IllegalArgumentException(
+ "Expected DocEnvImpl but got: " + original.getClass().getName());
+ }
+
+ private boolean exclude(Element el) {
+ boolean sawPublic = false;
+
+ for (AnnotationMirror am : el.getAnnotationMirrors()) {
+ final String qname = am.getAnnotationType().toString();
+
+ if (qname.equals(InterfaceAudience.Private.class.getCanonicalName()) || qname.equals(
+ InterfaceAudience.LimitedPrivate.class.getCanonicalName())) {
+ return true;
+ }
+
+ if (stability.equals(StabilityOptions.EVOLVING_OPTION)) {
+ if (qname.equals(InterfaceStability.Unstable.class.getCanonicalName())) {
+ return true;
+ }
+ }
+ if (stability.equals(StabilityOptions.STABLE_OPTION)) {
+ if (qname.equals(InterfaceStability.Unstable.class.getCanonicalName()) || qname.equals(
+ InterfaceStability.Evolving.class.getCanonicalName())) {
+ return true;
+ }
+ }
+
+ if (qname.equals(InterfaceAudience.Public.class.getCanonicalName())) {
+ sawPublic = true;
+ }
+ }
+
+ if (sawPublic) {
+ return false;
+ }
+
+ if (treatUnannotatedClassesAsPrivate) {
+ ElementKind k = el.getKind();
+ if (k == ElementKind.CLASS || k == ElementKind.INTERFACE
+ || k == ElementKind.ANNOTATION_TYPE) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ @Override
+ public Set<? extends Element> getSpecifiedElements() {
+ Set<? extends Element> base = delegate.getSpecifiedElements();
+ return base.stream().filter(e -> !exclude(e))
+ .collect(Collectors.toCollection(LinkedHashSet::new));
+ }
+
+ @Override
+ public Set<? extends Element> getIncludedElements() {
+ Set<? extends Element> base = delegate.getIncludedElements();
+ return base.stream().filter(e -> !exclude(e))
+ .collect(Collectors.toCollection(LinkedHashSet::new));
+ }
+
+ @Override
+ public DocTrees getDocTrees() {
+ return delegate.getDocTrees();
+ }
+
+ @Override
+ public Elements getElementUtils() {
+ return delegate.getElementUtils();
+ }
+
+ @Override
+ public Types getTypeUtils() {
+ return delegate.getTypeUtils();
+ }
+
+ @Override
+ public boolean isIncluded(Element e) {
+ boolean base = delegate.isIncluded(e);
+ return base && !exclude(e);
+ }
+
+ @Override
+ public boolean isSelected(Element e) {
+ return delegate.isSelected(e);
+ }
+
+ @Override
+ public JavaFileManager getJavaFileManager() {
+ return delegate.getJavaFileManager();
+ }
+
+ @Override
+ public SourceVersion getSourceVersion() {
+ return delegate.getSourceVersion();
+ }
+
+ @Override
+ public DocletEnvironment.ModuleMode getModuleMode() {
+ return delegate.getModuleMode();
+ }
+
+ @Override
+ public JavaFileObject.Kind getFileKind(TypeElement type) {
+ return delegate.getFileKind(type);
+ }
+}
diff --git
a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
index 741a1c7d068..39d5fa25d07 100644
---
a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
+++
b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
@@ -18,35 +18,10 @@
package org.apache.hadoop.classification.tools;
import jdk.javadoc.doclet.DocletEnvironment;
-import javax.lang.model.element.AnnotationMirror;
-import javax.lang.model.element.Element;
-import javax.lang.model.element.ElementKind;
-import javax.lang.model.element.ExecutableElement;
-import javax.lang.model.element.PackageElement;
-import javax.lang.model.element.TypeElement;
-import javax.lang.model.element.VariableElement;
-
-import java.lang.reflect.Array;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.WeakHashMap;
-import java.util.stream.Collectors;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
/**
- * Process the {@link DocletEnvironment} by substituting with (nested) proxy objects that
- * exclude elements with Private or LimitedPrivate annotations.
+ * Process the {@link DocletEnvironment} by filtering out elements with
+ * Private or LimitedPrivate annotations using HadoopDocEnvImpl.
* <p>
* Based on code from http://www.sixlegs.com/blog/java/exclude-javadoc-tag.html.
*/
@@ -63,7 +38,6 @@ private RootDocProcessor() {
// no instances
}
-
static String getStability() {
return stability;
}
@@ -77,293 +51,6 @@ static boolean isTreatUnannotatedClassesAsPrivate() {
}
public static DocletEnvironment process(DocletEnvironment root) {
- return (DocletEnvironment) wrap(root, DocletEnvironment.class);
- }
-
- private static final Map<Object, Object> PROXIES = new WeakHashMap<>();
-
- private static Object wrap(Object obj, Class<?> expectedType) {
- if (obj == null) {
- return null;
- }
-
- if (obj instanceof DocletEnvironment) {
- return getProxy(obj, new Class<?>[]{DocletEnvironment.class},
- new EnvHandler((DocletEnvironment) obj));
- }
-
- if (obj instanceof Element) {
- return getElementProxy((Element) obj);
- }
-
- if (obj instanceof Set) {
- return filterAndWrapIterable((Iterable<?>) obj, true);
- }
- if (obj instanceof Collection) {
- return filterAndWrapIterable((Iterable<?>) obj, false);
- }
- if (obj instanceof Iterable) {
- return filterAndWrapIterable((Iterable<?>) obj, false);
- }
-
- if (obj.getClass().isArray()) {
- int len = Array.getLength(obj);
- Object[] res = new Object[len];
- for (int i = 0; i < len; i++) {
- Object v = Array.get(obj, i);
- res[i] = wrap(v, v != null ? v.getClass() : Object.class);
- }
- return res;
- }
-
- return obj;
- }
-
- private static Object getElementProxy(Element el) {
- Object cached = PROXIES.get(el);
- if (cached != null) {
- return cached;
- }
-
- Set<Class<?>> ifaces = new LinkedHashSet<>();
- Collections.addAll(ifaces, el.getClass().getInterfaces());
- ifaces.add(Element.class);
- if (el instanceof TypeElement) {
- ifaces.add(TypeElement.class);
- }
- if (el instanceof PackageElement) {
- ifaces.add(PackageElement.class);
- }
- if (el instanceof ExecutableElement) {
- ifaces.add(ExecutableElement.class);
- }
- if (el instanceof VariableElement) {
- ifaces.add(VariableElement.class);
- }
-
- Object proxy = getProxy(el, ifaces.toArray(new Class<?>[0]), new ElementHandler(el));
- PROXIES.put(el, proxy);
- return proxy;
- }
-
- private static Object getProxy(Object target, Class<?>[] ifaces, InvocationHandler h) {
- Object cached = PROXIES.get(target);
- if (cached != null) {
- return cached;
- }
- Object p = Proxy.newProxyInstance(target.getClass().getClassLoader(), ifaces, h);
- PROXIES.put(target, p);
- return p;
- }
-
- @SuppressWarnings("unchecked")
- private static Object filterAndWrapIterable(Iterable<?> iterable, boolean preserveSet) {
- if (iterable == null) {
- return null;
- }
- if (preserveSet) {
- Set<Object> out = new LinkedHashSet<>();
- for (Object o : iterable) {
- if (o instanceof Element) {
- Element el = (Element) o;
- if (!exclude(el)) {
- out.add(getElementProxy(el));
- }
- } else {
- out.add(wrap(o, o != null ? o.getClass() : Object.class));
- }
- }
- return out;
- } else {
- List<Object> out = new ArrayList<>();
- for (Object o : iterable) {
- if (o instanceof Element) {
- Element el = (Element) o;
- if (!exclude(el)) {
- out.add(getElementProxy(el));
- }
- } else {
- out.add(wrap(o, o != null ? o.getClass() : Object.class));
- }
- }
- return out;
- }
- }
-
- private static Object unwrap(Object maybeProxy) {
- if (!(maybeProxy instanceof Proxy)) {
- return maybeProxy;
- }
- InvocationHandler ih = Proxy.getInvocationHandler(maybeProxy);
- if (ih instanceof BaseHandler) {
- return ((BaseHandler) ih).target;
- }
- return maybeProxy;
- }
-
- private static boolean exclude(Element el) {
- boolean sawPublic = false;
-
- for (AnnotationMirror am : el.getAnnotationMirrors()) {
- final String qname = am.getAnnotationType().toString();
-
- if (qname.equals(InterfaceAudience.Private.class.getCanonicalName())
- || qname.equals(InterfaceAudience.LimitedPrivate.class.getCanonicalName())) {
- return true;
- }
-
- if (stability.equals(StabilityOptions.EVOLVING_OPTION)) {
- if (qname.equals(InterfaceStability.Unstable.class.getCanonicalName())) {
- return true;
- }
- }
- if (stability.equals(StabilityOptions.STABLE_OPTION)) {
- if (qname.equals(InterfaceStability.Unstable.class.getCanonicalName())
- || qname.equals(InterfaceStability.Evolving.class.getCanonicalName())) {
- return true;
- }
- }
-
- if (qname.equals(InterfaceAudience.Public.class.getCanonicalName())) {
- sawPublic = true;
- }
- }
-
- if (sawPublic) {
- return false;
- }
-
- if (isTreatUnannotatedClassesAsPrivate()) {
- ElementKind k = el.getKind();
- if (k == ElementKind.CLASS || k == ElementKind.INTERFACE ||
- k == ElementKind.ANNOTATION_TYPE) {
- return true;
- }
- }
-
- return false;
- }
-
- private static abstract class BaseHandler implements InvocationHandler {
- private final Object target;
-
- BaseHandler(Object target) {
- this.target = target;
- }
-
- protected Object getTarget() {
- return target;
- }
-
- Object wrapReturn(Object ret) {
- if (ret == null) {
- return null;
- }
- if (ret instanceof DocletEnvironment) {
- return wrap(ret, DocletEnvironment.class);
- }
- if (ret instanceof Element) {
- return getElementProxy((Element) ret);
- }
- if (ret instanceof Set) {
- return filterAndWrapIterable((Set<?>) ret, true);
- }
- if (ret instanceof Collection) {
- return filterAndWrapIterable((Collection<?>) ret, false);
- }
- if (ret instanceof Iterable) {
- return filterAndWrapIterable((Iterable<?>) ret, false);
- }
- if (ret.getClass().isArray()) {
- return wrap(ret, ret.getClass());
- }
- return ret;
- }
-
- Object[] unwrapArgs(Object[] args) {
- if (args == null) {
- return null;
- }
- Object[] r = new Object[args.length];
- for (int i = 0; i < args.length; i++) {
- r[i] = unwrap(args[i]);
- }
- return r;
- }
- }
-
- private static final class EnvHandler extends BaseHandler {
- private final DocletEnvironment env;
-
- EnvHandler(DocletEnvironment env) {
- super(env);
- this.env = env;
- }
-
- @Override
- public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
- String name = method.getName();
- Object[] uargs = unwrapArgs(args);
-
- if ("getDocTrees".equals(name)) {
- return env.getDocTrees();
- } else if ("isIncluded".equals(name)) {
- Element e = (Element) uargs[0];
- boolean base = env.isIncluded(e);
- return base && !exclude(e);
- } else if ("getIncludedElements".equals(name)) {
- Set<? extends Element> base = env.getIncludedElements();
- return base.stream()
- .filter(e -> !exclude(e))
- .collect(Collectors.toCollection(LinkedHashSet::new));
- } else if ("getSpecifiedElements".equals(name)) {
- Set<? extends Element> base = env.getSpecifiedElements();
- return base.stream()
- .filter(e -> !exclude(e))
- .collect(Collectors.toCollection(LinkedHashSet::new));
- }
-
- Object ret = method.invoke(getTarget(), uargs);
- return wrapReturn(ret);
- }
- }
-
- private static final class ElementHandler extends BaseHandler {
- private final Element element;
-
- ElementHandler(Element element) {
- super(element);
- this.element = element;
- }
-
- @Override
- public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
- String name = method.getName();
- Object[] uargs = unwrapArgs(args);
-
- if ("equals".equals(name) && uargs != null && uargs.length == 1) {
- return Objects.equals(element, unwrap(uargs[0]));
- }
- if ("hashCode".equals(name) && (uargs == null || uargs.length == 0)) {
- return element.hashCode();
- }
- if ("toString".equals(name) && (uargs == null || uargs.length == 0)) {
- return element.toString();
- }
-
- if ("getEnclosedElements".equals(name) && (uargs == null || uargs.length == 0)) {
- List<? extends Element> enclosed = element.getEnclosedElements();
- List<Element> filtered = new ArrayList<>();
- for (Element e : enclosed) {
- if (!exclude(e)) {
- filtered.add(e);
- }
- }
- return filtered;
- }
-
- Object ret = method.invoke(getTarget(), uargs);
- return wrapReturn(ret);
- }
+ return new HadoopDocEnvImpl(root, stability, treatUnannotatedClassesAsPrivate);
}
}
diff --git
a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
index c9d23ab472b..a0b755cd0a0 100644
---
a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
+++
b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
@@ -38,7 +38,7 @@ public final class StabilityOptions {
public static final String UNSTABLE_OPTION = "-unstable";
enum Level { STABLE, EVOLVING, UNSTABLE }
- private static volatile Level level = Level.STABLE;
+ private static volatile Level level = Level.UNSTABLE;
static void setLevel(Level l) {
if (l != null) {
diff --git
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/ManifestCommitterConstants.java
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/ManifestCommitterConstants.java
index 8f359e45000..716e7b6e2f3 100644
---
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/ManifestCommitterConstants.java
+++
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/ManifestCommitterConstants.java
@@ -221,7 +221,7 @@ public final class ManifestCommitterConstants {
public static final boolean OPT_DELETE_TARGET_FILES_DEFAULT = false;
/**
- * Name of the factory: {@value}.
+ * Name of the factory.
*/
public static final String MANIFEST_COMMITTER_FACTORY =
ManifestCommitterFactory.class.getName();
@@ -235,7 +235,6 @@ public final class ManifestCommitterConstants {
/**
* Default classname of the store operations.
- * Value: {@value}.
*/
public static final String STORE_OPERATIONS_CLASS_DEFAULT =
ManifestStoreOperationsThroughFileSystem.class.getName();
diff --git
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/impl/ManifestStoreOperations.java
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/impl/ManifestStoreOperations.java
index 03e3ce0f0ad..a7a72cda296 100644
---
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/impl/ManifestStoreOperations.java
+++
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/impl/ManifestStoreOperations.java
@@ -69,7 +69,7 @@ public void bindToFileSystem(FileSystem fileSystem, Path path) throws IOExceptio
/**
* Is a path a file? Used during directory creation.
- * The is a copy & paste of FileSystem.isFile();
+ * This is a copy and paste of FileSystem.isFile();
* {@code StoreOperationsThroughFileSystem} calls into
* the FS direct so that stores which optimize their probes
* can save on IO.
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index d902fd489ed..b9e2fe4b864 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -2316,8 +2316,15 @@
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<doclint>all</doclint>
+ <additionalJOptions>
+ <!-- To fix doclet runtime error -->
+ <additionalJOption>-J--add-exports=jdk.javadoc/jdk.javadoc.internal.tool=ALL-UNNAMED</additionalJOption>
+ </additionalJOptions>
<additionalOptions>
- <additionalOption>-Xmaxwarns 10000</additionalOption>
+ <!-- To fix doclet compile error -->
+ <additionalOption>--add-exports</additionalOption>
+ <additionalOption>jdk.javadoc/jdk.javadoc.internal.tool=ALL-UNNAMED</additionalOption>
+ <additionalOption>-Xmaxwarns 10000</additionalOption>
</additionalOptions>
</configuration>
</plugin>
@@ -2825,16 +2832,6 @@
</dependencies>
</dependencyManagement>
</profile>
- <profile>
- <id>jdk17+</id>
- <activation>
- <jdk>[17,)</jdk>
- </activation>
- <properties>
- <javac.version>17</javac.version>
- <maven.compiler.release>${javac.version}</maven.compiler.release>
- </properties>
- </profile>
<profile>
<id>quiet-surefire</id>
<activation>
diff --git
a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/TracingHeaderVersion.java
b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/TracingHeaderVersion.java
index c542e42827c..4ab66665f1b 100644
---
a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/TracingHeaderVersion.java
+++
b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/TracingHeaderVersion.java
@@ -34,27 +34,36 @@ public enum TracingHeaderVersion {
/**
* Version 1 of the tracing header, which includes a version prefix and has 13 permanent fields.
* This version is used for the current tracing header schema.
+ *
+ * <pre>{@code
* Schema: version:clientCorrelationId:clientRequestId:fileSystemId
* :primaryRequestId:streamId:opType:retryHeader:ingressHandler
* :position:operatedBlobCount:operationSpecificHeader:httpOperationHeader
+ * }</pre>
*/
V1("v1", 13),
/**
* Version 2 of the tracing header, which includes a version prefix and has 16 permanent fields.
* This version is used for the current tracing header schema.
+ *
+ * <pre>{@code
* Schema: version:clientCorrelationId:clientRequestId:fileSystemId
* :primaryRequestId:streamId:opType:retryHeader:ingressHandler
* :position:operatedBlobCount:operationSpecificHeader:httpOperationHeader
* :resourceUtilizationMetrics:fnsEndptConvertedIndicator
+ * }</pre>
*/
V2("v2", 15),
/**
* Version 0 of the aggregated metrics tracing header, which includes
* a version prefix and has 3 permanent fields.
* This version is used for the aggregated metrics tracing header schema.
+ *
+ * <pre>{@code
* Schema: metricsVersion:List<AggregatedMetrics>
* where AggregatedMetrics = clientCorrelationId:fileSystemId:aggregated-metrics
* and AggregatedMetrics is enclosed within [] and separated by :
+ * }</pre>
*/
AV0("av0", 3);
diff --git
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppDetailsController.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppDetailsController.java
index 63aefa615ff..686f07fa471 100644
---
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppDetailsController.java
+++
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppDetailsController.java
@@ -52,6 +52,7 @@ public AppDetailsController() {
/**
* List detail information about the deployed application.
*
+ * <pre>{@code
* @apiGroup AppDetailController
* @apiName getDetails
* @api {get} /app_details/config/{id} Check config of application instance.
@@ -113,8 +114,10 @@ public AppDetailsController() {
* }
* }
* }
- * @param id - Application ID
- * @return application entry-
+ * }</pre>
+ *
+ * @param id Application ID
+ * @return application entry
*/
@Path("config/{id}")
@GET
@@ -127,10 +130,12 @@ public AppEntry getDetails(@PathParam("id") String id) {
/**
* Check application status.
*
+ * <pre>{@code
* @apiGroup AppDetailController
* @apiName getStatus
* @api {get} /app_details/status/{id} Check status of application instance.
* @apiParam {String} id Application ID to check.
+ * @apiSuccess {Object} text Give status
* @apiSuccessExample {json} Success-Response:
* HTTP/1.1 200 OK
* {
@@ -195,8 +200,9 @@ public AppEntry getDetails(@PathParam("id") String id) {
* }
* }
* }
- * @apiSuccess {Object} text Give status
- * @param id - Application ID
+ * }</pre>
+ *
+ * @param id Application ID
* @return application entry
*/
@Path("status/{id}")
@@ -213,13 +219,16 @@ public AppEntry getStatus(@PathParam("id") String id) {
/**
* Stop an application.
*
+ * <pre>{@code
* @apiGroup AppDetailController
* @apiName stopApp
* @api {post} /app_details/stop/{id} Stop one instance of application.
* @apiParam {String} id Application ID to stop.
* @apiSuccess {String} text Give deployment status
* @apiError BadRequest Requested application does not stop.
- * @param id - Application ID
+ * }</pre>
+ *
+ * @param id Application ID
* @return Web response code
*/
@Path("stop/{id}")
@@ -242,13 +251,16 @@ public Response stopApp(@PathParam("id") String id) {
/**
* Restart an application.
*
+ * <pre>{@code
* @apiGroup AppDetailController
* @apiName restartApp
* @api {post} /app_details/restart/{id} Restart one instance of application.
* @apiParam {String} id Application ID to restart.
* @apiSuccess {String} text Give deployment status
* @apiError BadRequest Requested application does not restart.
- * @param id - Application ID
+ * }</pre>
+ *
+ * @param id Application ID
* @return Web response code
*/
@Path("restart/{id}")
@@ -271,12 +283,15 @@ public Response restartApp(@PathParam("id") String id) {
/**
* Upgrade an application.
*
+ * <pre>{@code
* @apiGroup AppDetailController
* @apiName upgradeApp
* @api {put} /app_details/upgrade/{id} Upgrade one instance of application.
* @apiParam {String} id Application Name to upgrade.
* @apiSuccess {String} text
* @apiError BadRequest Requested application does not upgrade.
+ * }</pre>
+ *
* @return Web response code
*/
@Path("upgrade/{id}")
diff --git
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppListController.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppListController.java
index d818f23cdcf..473d305cb15 100644
---
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppListController.java
+++
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppListController.java
@@ -51,6 +51,7 @@ public AppListController() {
/**
* Get Application List.
*
+ * <pre>{@code
* @apiGroup AppListController
* @apiName get
* @api {get} /app_list Get list of deployed applications.
@@ -116,7 +117,9 @@ public AppListController() {
* ...
* }
* ]
- * @return - Active application deployed by current user.
+ * }</pre>
+ *
+ * @return Active application deployed by current user.
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@@ -128,13 +131,16 @@ public List<AppEntry> getList() {
/**
* Delete an application.
*
+ * <pre>{@code
* @apiGroup AppListController
* @apiName delete
* @api {delete} /app_list Delete one instance of application.
* @apiParam {String} id Application name to delete.
* @apiSuccess {String} text Delete request accepted
- * @param id - application ID
- * @param name - application name
+ * }</pre>
+ *
+ * @param id application ID
+ * @param name application name
* @return Web response
*/
@DELETE
@@ -152,13 +158,16 @@ public Response delete(@PathParam("id") String id,
/**
* Deploy an application.
*
+ * <pre>{@code
* @apiGroup AppListController
* @apiName deploy
* @api {post} /app_list/{id} Deploy one instance of application.
* @apiParam {String} id Application ID to deploy.
* @apiSuccess {String} text Give deployment status
* @apiError BadRequest Unable to deploy requested application.
- * @param id - application ID
+ * }</pre>
+ *
+ * @param id application ID
* @return Web response
*/
@POST
diff --git
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppStoreController.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppStoreController.java
index 5abb548b5dc..1dff3b2f54a 100644
---
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppStoreController.java
+++
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/controller/AppStoreController.java
@@ -49,6 +49,7 @@ public AppStoreController() {
/**
* Display the most frequently used applications on YARN AppCatalog home page.
*
+ * <pre>{@code
* @apiGroup AppStoreController
* @apiName get
* @api {get} /app_store/recommended Display recommended applications.
@@ -70,7 +71,9 @@ public AppStoreController() {
* ...
* }
* ]
- * @return - List of YARN applications
+ * }</pre>
+ *
+ * @return List of YARN applications
*/
@GET
@Path("recommended")
@@ -83,6 +86,7 @@ public List<AppStoreEntry> get() {
/**
* Search for yarn applications from solr.
*
+ * <pre>{@code
* @apiGroup AppStoreController
* @apiName search
* @api {get} /app_store/search Find application from appstore.
@@ -105,8 +109,10 @@ public List<AppStoreEntry> get() {
* ...
* }
* ]
- * @param keyword - search for keyword
- * @return - List of YARN applications matching keyword search.
+ * }</pre>
+ *
+ * @param keyword search for keyword
+ * @return List of YARN applications matching keyword search.
*/
@GET
@Path("search")
@@ -133,6 +139,7 @@ public AppStoreEntry get(@PathParam("id") String id) {
/**
* Register an application.
*
+ * <pre>{@code
* @apiGroup AppStoreController
* @apiName register
* @api {post} /app_store/register Register an application in appstore.
@@ -171,7 +178,9 @@ public AppStoreEntry get(@PathParam("id") String id) {
* }
* @apiSuccess {String} Response Application register result.
* @apiError BadRequest Error in process application registration.
- * @param app - Yarnfile in JSON form
+ * }</pre>
+ *
+ * @param app Yarnfile in JSON form
* @return Web response
*/
@POST
diff --git
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Component.java
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Component.java
index 8a5b7d402f7..1d66875d0a9 100644
---
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Component.java
+++
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Component.java
@@ -116,8 +116,6 @@ public class Component implements Serializable {
* ON_FAILURE (Only restart component instance if instance exit code !=
* 0);
* NEVER (Do not restart in any cases)
- *
- * @return restartPolicy
**/
@XmlType(name = "restart_policy")
@XmlEnum
diff --git a/pom.xml b/pom.xml
index 8a4688a74a3..1903969ac36 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,7 +107,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<maven-dependency-plugin.version>3.8.1</maven-dependency-plugin.version>
<maven-enforcer-plugin.version>3.5.0</maven-enforcer-plugin.version>
<restrict-imports.enforcer.version>2.0.0</restrict-imports.enforcer.version>
- <maven-javadoc-plugin.version>3.0.1</maven-javadoc-plugin.version>
+ <maven-javadoc-plugin.version>3.8.0</maven-javadoc-plugin.version>
<maven-gpg-plugin.version>1.5</maven-gpg-plugin.version>
<maven-remote-resources-plugin.version>1.5</maven-remote-resources-plugin.version>
<maven-resources-plugin.version>3.0.1</maven-resources-plugin.version>
@@ -116,8 +116,8 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<clover-maven-plugin.version>4.4.1</clover-maven-plugin.version>
<maven-bundle-plugin.version>2.5.0</maven-bundle-plugin.version>
<lifecycle-mapping.version>1.0.0</lifecycle-mapping.version>
- <maven-checkstyle-plugin.version>3.1.0</maven-checkstyle-plugin.version>
- <checkstyle.version>8.29</checkstyle.version>
+ <maven-checkstyle-plugin.version>3.6.0</maven-checkstyle-plugin.version>
+ <checkstyle.version>11.1.0</checkstyle.version>
<dependency-check-maven.version>7.1.1</dependency-check-maven.version>
<spotbugs.version>4.9.7</spotbugs.version>
<spotbugs-maven-plugin.version>4.9.7.0</spotbugs-maven-plugin.version>
@@ -548,6 +548,9 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
+ <configuration>
+ <release>${javac.version}</release>
+ </configuration>
</plugin>
</plugins>
</pluginManagement>
@@ -677,9 +680,9 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<reportSet>
<id>aggregate</id>
<configuration>
- <maxmemory>1024m</maxmemory>
<quiet>true</quiet>
<verbose>false</verbose>
+ <legacyMode>true</legacyMode>
<source>${maven.compile.source}</source>
<charset>${maven.compile.encoding}</charset>
<reportOutputDirectory>${project.build.directory}/site</reportOutputDirectory>
@@ -712,8 +715,15 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<version>${project.version}</version>
</docletArtifact>
</docletArtifacts>
- <useStandardDocletOptions>true</useStandardDocletOptions>
-
+ <additionalJOptions>
+ <!-- To fix doclet runtime error -->
+ <additionalJOption>-J--add-exports=jdk.javadoc/jdk.javadoc.internal.tool=ALL-UNNAMED</additionalJOption>
+ </additionalJOptions>
+ <additionalOptions>
+ <!-- To fix doclet compile error -->
+ <additionalOption>--add-exports</additionalOption>
+ <additionalOption>jdk.javadoc/jdk.javadoc.internal.tool=ALL-UNNAMED</additionalOption>
+ </additionalOptions>
<!-- switch on dependency-driven aggregation -->
<includeDependencySources>false</includeDependencySources>
@@ -721,7 +731,14 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<!-- include ONLY dependencies I control -->
<dependencySourceInclude>org.apache.hadoop:hadoop-annotations</dependencySourceInclude>
</dependencySourceIncludes>
-
+ <additionalDependencies>
+ <additionalDependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${project.version}</version>
+ <classifier>tests</classifier>
+ </additionalDependency>
+ </additionalDependencies>
</configuration>
<reports>
<report>aggregate</report>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]