This is an automated email from the ASF dual-hosted git repository.
yhu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git
The following commit(s) were added to refs/heads/master by this push:
new a0674a97db6 [ErrorProne] Fix InvalidParam, InvalidInlineTag,
InvalidBlockTag, and InvalidLink checks (#37773)
a0674a97db6 is described below
commit a0674a97db6d88139fe8936448c8f8566a284fb8
Author: Radosław Stankiewicz <[email protected]>
AuthorDate: Wed Mar 11 21:44:56 2026 +0100
[ErrorProne] Fix InvalidParam, InvalidInlineTag, InvalidBlockTag, and
InvalidLink checks (#37773)
* Fix InvalidInlineTag, InvalidParam, InvalidBlockTag and InvalidLink
javadocs
* Fix JdbcUtil after merge
* spotless
* changes
* leave ignore block
* Fix InvalidLink and restore InvalidBlockTag to disabledChecks
* Remove duplicate entry
---
.../org/apache/beam/gradle/BeamModulePlugin.groovy | 3 ---
.../org/apache/beam/examples/webapis/ImageRequest.java | 2 +-
.../main/java/org/apache/beam/it/gcp/LoadTestBase.java | 4 ++--
.../org/apache/beam/it/gcp/storage/FileBasedIOLT.java | 2 +-
.../apache/beam/runners/core/StatefulDoFnRunner.java | 3 +--
.../beam/runners/direct/DirectTimerInternals.java | 4 ++--
.../beam/runners/flink/streaming/StreamSources.java | 2 +-
.../translation/wrappers/streaming/DoFnOperator.java | 4 ++--
.../translation/wrappers/streaming/DoFnOperator.java | 4 ++--
.../beam/runners/flink/streaming/StreamSources.java | 2 +-
.../translation/wrappers/streaming/DoFnOperator.java | 4 ++--
.../worker/DataflowElementExecutionTracker.java | 8 ++++----
.../dataflow/worker/DataflowExecutionContext.java | 4 ++--
.../MetricsToPerStepNamespaceMetricsConverter.java | 4 ++--
.../beam/runners/dataflow/worker/WorkUnitClient.java | 2 +-
.../worker/windmill/client/AbstractWindmillStream.java | 6 +++---
.../client/ResettableThrowingStreamObserver.java | 17 ++++++++---------
.../fnexecution/artifact/ArtifactStagingService.java | 1 -
.../org/apache/beam/runners/prism/PrismExecutor.java | 4 ++--
.../apache/beam/runners/prism/PrismPipelineResult.java | 2 +-
.../runners/samza/runtime/ClassicBundleManager.java | 6 ++----
.../translation/helpers/EncoderHelpers.java | 4 ++--
.../spark/translation/GroupCombineFunctions.java | 2 +-
.../main/java/org/apache/beam/sdk/io/FileSystem.java | 4 ++--
.../main/java/org/apache/beam/sdk/io/FileSystems.java | 4 ++--
.../org/apache/beam/sdk/metrics/MetricsEnvironment.java | 11 ++++++-----
.../sdk/schemas/logicaltypes/UnknownLogicalType.java | 2 +-
.../main/java/org/apache/beam/sdk/transforms/DoFn.java | 2 +-
.../java/org/apache/beam/sdk/transforms/Flatten.java | 2 +-
.../java/org/apache/beam/sdk/transforms/JsonToRow.java | 6 +++---
.../java/org/apache/beam/sdk/transforms/MapKeys.java | 1 -
.../beam/sdk/transforms/reflect/DoFnSignature.java | 2 +-
.../org/apache/beam/sdk/util/RowStringInterpolator.java | 8 +++-----
.../beam/sdk/util/construction/TransformUpgrader.java | 3 +--
.../beam/sdk/util/construction/UnknownCoderWrapper.java | 2 +-
.../org/apache/beam/sdk/util/SerializableUtilsTest.java | 3 ++-
.../beam/sdk/expansion/service/ExpansionService.java | 2 +-
.../apache/beam/sdk/extensions/gcp/util/GcsUtil.java | 2 +-
.../extensions/ordered/OrderedProcessingHandler.java | 2 +-
.../ordered/OrderedEventProcessorTestBase.java | 2 +-
.../sdk/extensions/sql/meta/catalog/CatalogManager.java | 4 ++--
.../org/apache/beam/sdk/extensions/sql/TestUtils.java | 2 +-
.../apache/beam/sdk/extensions/timeseries/FillGaps.java | 2 +-
.../SplittableSplitAndSizeRestrictionsDoFnRunner.java | 2 +-
.../beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java | 4 ++--
.../org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java | 4 ++--
.../java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java | 2 +-
.../sdk/io/cdap/PluginConfigInstantiationUtils.java | 4 ++--
.../test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java | 2 +-
.../beam/sdk/io/elasticsearch/ElasticsearchIO.java | 12 ++++++------
.../FileReadSchemaTransformFormatProviderTest.java | 2 +-
.../bigquery/AvroGenericRecordToStorageApiProto.java | 2 +-
.../sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java | 2 +-
.../apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java | 2 +-
.../sdk/io/gcp/bigquery/RowMutationInformation.java | 16 ++++++++--------
.../sdk/io/gcp/bigquery/TableRowToStorageApiProto.java | 2 +-
.../org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java | 2 +-
.../beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java | 8 ++++----
.../beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java | 2 +-
.../beam/sdk/io/gcp/spanner/ReadSpannerSchema.java | 2 +-
.../changestreams/action/QueryChangeStreamAction.java | 6 +++---
.../beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java | 2 +-
.../beam/sdk/io/hbase/HBaseRowMutationsCoder.java | 2 +-
.../main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java | 6 ++----
.../test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java | 2 +-
.../org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java | 2 +-
.../java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java | 2 +-
.../apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java | 2 +-
.../apache/beam/sdk/io/singlestore/SingleStoreIO.java | 4 ++--
.../beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java | 5 +++--
70 files changed, 126 insertions(+), 137 deletions(-)
diff --git
a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
index 7f8d6ea965b..27bc588efaa 100644
--- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
+++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
@@ -1545,9 +1545,6 @@ class BeamModulePlugin implements Plugin<Project> {
"ExtendsAutoValue",
"InlineMeSuggester",
"InvalidBlockTag",
- "InvalidInlineTag",
- "InvalidLink",
- "InvalidParam",
"InvalidThrows",
"JavaTimeDefaultTimeZone",
"JavaUtilDate",
diff --git
a/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java
b/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java
index 41601c77f07..63a68a33419 100644
---
a/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java
+++
b/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java
@@ -54,7 +54,7 @@ abstract class ImageRequest implements Serializable {
return new AutoValue_ImageRequest.Builder();
}
- /** Build an {@link ImageRequest} from a {@param url}. */
+ /** Build an {@link ImageRequest} from a {@code url}. */
static ImageRequest of(String url) {
return builder().setImageUrl(url).setMimeType(mimeTypeOf(url)).build();
}
diff --git
a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java
b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java
index 51a4dc185dd..cd9ef52ed83 100644
---
a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java
+++
b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java
@@ -211,7 +211,7 @@ public abstract class LoadTestBase {
*
* @param metrics a map of raw metrics. The results are also appended in the
map.
* @param launchInfo Job info of the job
- * @param config a {@class MetricsConfiguration}
+ * @param config a {@link MetricsConfiguration}
*/
private void computeDataflowMetrics(
Map<String, Double> metrics, LaunchInfo launchInfo, MetricsConfiguration
config)
@@ -365,7 +365,7 @@ public abstract class LoadTestBase {
* Computes throughput metrics of the given pcollection in dataflow job.
*
* @param jobInfo dataflow job LaunchInfo
- * @param config the {@class MetricsConfiguration}
+ * @param config the {@link MetricsConfiguration}
* @param timeInterval interval for the monitoring query
* @return throughput metrics of the pcollection
*/
diff --git
a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java
b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java
index 2da8c0dc134..6a4b7848d50 100644
---
a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java
+++
b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java
@@ -251,7 +251,7 @@ public class FileBasedIOLT extends IOLoadTestBase {
/** Number of dynamic destinations to write. */
@JsonProperty public int numShards = 0;
- /** See {@class org.apache.beam.sdk.io.Compression}. */
+ /** See {@link org.apache.beam.sdk.io.Compression}. */
@JsonProperty public String compressionType = "UNCOMPRESSED";
/** Runner specified to run the pipeline. */
diff --git
a/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java
b/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java
index e562a4067d2..77913883466 100644
---
a/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java
+++
b/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java
@@ -49,8 +49,7 @@ import org.joda.time.Instant;
/**
* A customized {@link DoFnRunner} that handles late data dropping and garbage
collection for
* stateful {@link DoFn DoFns}. It registers a GC timer in {@link
#processElement(WindowedValue)}
- * and does cleanup in {@link #onTimer(String, String, BoundedWindow, Instant,
Instant, TimeDomain,
- * boolean)}
+ * and does cleanup in {@link #onTimer}
*
* @param <InputT> the type of the {@link DoFn} (main) input elements
* @param <OutputT> the type of the {@link DoFn} (main) output elements
diff --git
a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java
b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java
index db1112f7388..762a2338c3e 100644
---
a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java
+++
b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java
@@ -95,14 +95,14 @@ class DirectTimerInternals implements TimerInternals {
timeDomain));
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Deprecated
@Override
public void deleteTimer(StateNamespace namespace, String timerId, String
timerFamilyId) {
throw new UnsupportedOperationException("Canceling of timer by ID is not
yet supported.");
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Deprecated
@Override
public void deleteTimer(TimerData timerData) {
diff --git
a/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
b/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
index c03799d0953..793959c5e69 100644
---
a/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
+++
b/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
@@ -52,7 +52,7 @@ public class StreamSources {
@Override
default void emitWatermarkStatus(WatermarkStatus watermarkStatus) {}
- /** In Flink 1.19 the {@code emitRecordAttributes} method was added. */
+ /** In Flink 1.19 the {@code recordAttributes} method was added. */
@Override
default void emitRecordAttributes(RecordAttributes recordAttributes) {
throw new UnsupportedOperationException("emitRecordAttributes not
implemented");
diff --git
a/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
b/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
index 5c0780b8f83..fcafbd60048 100644
---
a/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
+++
b/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
@@ -1655,7 +1655,7 @@ public class DoFnOperator<PreInputT, InputT, OutputT>
}
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Deprecated
@Override
public void deleteTimer(StateNamespace namespace, String timerId, String
timerFamilyId) {
@@ -1672,7 +1672,7 @@ public class DoFnOperator<PreInputT, InputT, OutputT>
}
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Override
@Deprecated
public void deleteTimer(TimerData timer) {
diff --git
a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
index a4df72485cc..e582635e098 100644
---
a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
+++
b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
@@ -1655,7 +1655,7 @@ public class DoFnOperator<PreInputT, InputT, OutputT>
}
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Deprecated
@Override
public void deleteTimer(StateNamespace namespace, String timerId, String
timerFamilyId) {
@@ -1672,7 +1672,7 @@ public class DoFnOperator<PreInputT, InputT, OutputT>
}
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Override
@Deprecated
public void deleteTimer(TimerData timer) {
diff --git
a/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
b/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
index a39af17766f..5f9a1f1dd47 100644
---
a/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
+++
b/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java
@@ -52,7 +52,7 @@ public class StreamSources {
@Override
default void emitWatermarkStatus(WatermarkStatus watermarkStatus) {}
- /** In Flink 1.19 the {@code emitRecordAttributes} method was added. */
+ /** In Flink 1.19 the {@code recordAttributes} method was added. */
@Override
default void emitRecordAttributes(RecordAttributes recordAttributes) {
throw new UnsupportedOperationException("emitRecordAttributes not
implemented");
diff --git
a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
index a8e140b8a2c..7cf364d1469 100644
---
a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
+++
b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java
@@ -1658,7 +1658,7 @@ public class DoFnOperator<PreInputT, InputT, OutputT>
}
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Deprecated
@Override
public void deleteTimer(StateNamespace namespace, String timerId, String
timerFamilyId) {
@@ -1675,7 +1675,7 @@ public class DoFnOperator<PreInputT, InputT, OutputT>
}
}
- /** @deprecated use {@link #deleteTimer(StateNamespace, String,
TimeDomain)}. */
+ /** @deprecated use {@link #deleteTimer(StateNamespace, String, String,
TimeDomain)}. */
@Override
@Deprecated
public void deleteTimer(TimerData timer) {
diff --git
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java
index ccc98fa87e6..dffd10806af 100644
---
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java
+++
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java
@@ -100,14 +100,14 @@ public class DataflowElementExecutionTracker extends
ElementExecutionTracker {
/** Marker execution to represent when there is no element currently being
processed. */
static final ElementExecution IDLE = new ElementExecution();
- /** Only empty for {@see IDLE}. */
+ /** Only empty for {@link #IDLE}. */
final Optional<NameContext> step;
ElementExecution(NameContext step) {
this.step = Optional.of(step);
}
- /** Only used for {@see IDLE}. */
+ /** Only used for {@link #IDLE}. */
private ElementExecution() {
step = Optional.empty();
}
@@ -155,8 +155,8 @@ public class DataflowElementExecutionTracker extends
ElementExecutionTracker {
* Journal of fragments of execution per element to count for attributing
processing time. Each
* time we transition up or down the stage fusion graph we add an
execution fragment for the
* currently processing element with an incremented snapshot version. Each
snapshot version must
- * have a representative value in the {@code executionJournal}, or {@see
IDLE_EXECUTION} to
- * represent completion of processing.
+ * have a representative value in the {@code executionJournal}, or {@link
ElementExecution#IDLE}
+ * to represent completion of processing.
*/
private final Journal<ElementExecution> executionJournal;
diff --git
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java
index cd9a222b487..3dc3293aa26 100644
---
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java
+++
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java
@@ -104,8 +104,8 @@ public abstract class DataflowExecutionContext<T extends
DataflowStepContext> {
* PCollectionView PCollectionViews}.
*
* <p>If side input source metadata is provided by the service in {@link
SideInputInfo
- * sideInputInfos}, we request a {@link SideInputReader} from the {@code
executionContext} using
- * that info. If no side input source metadata is provided but the DoFn
expects side inputs, as a
+ * sideInputInfos}, we request a {@link SideInputReader} from the execution
context using that
+ * info. If no side input source metadata is provided but the DoFn expects
side inputs, as a
* fallback, we request a {@link SideInputReader} based only on the expected
views.
*
* <p>These cases are not disjoint: Whenever a {@link GroupAlsoByWindowFn}
takes side inputs,
diff --git
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java
index 356781e11e5..0ab6a0537d9 100644
---
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java
+++
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java
@@ -171,8 +171,8 @@ public class MetricsToPerStepNamespaceMetricsConverter {
/**
* @param metricName The {@link MetricName} that represents this Histogram.
- * @param value The histogram value. Currently we only support converting
histograms that use
- * {@code linear} or {@code exponential} buckets.
+ * @param inputHistogram The histogram value. Currently we only support
converting histograms that
+ * use {@code linear} or {@code exponential} buckets.
* @return If this conversion succeeds, a {@code MetricValue} that
represents this histogram.
* Otherwise returns an empty optional.
*/
diff --git
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java
index 26b1dc55ead..e159e7fbd2e 100644
---
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java
+++
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java
@@ -75,7 +75,7 @@ public interface WorkUnitClient {
* Reports the worker messages to dataflow. We currently report autoscaling
signals and
per-worker metrics with this path.
*
- * @param msg the WorkerMessages to report
+ * @param messages the WorkerMessages to report
* @return a list of {@link WorkerMessageResponse}
*/
List<WorkerMessageResponse> reportWorkerMessage(List<WorkerMessage>
messages) throws IOException;
diff --git
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java
index 7dec8d1ed6c..7d74b868056 100644
---
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java
+++
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java
@@ -58,10 +58,10 @@ import org.slf4j.Logger;
* broken stream.
*
* <p>Subclasses should override {@link #newResponseHandler()} to implement a
handler for physical
- * stream connection. {@link #onNewStream()} to perform any work that must be
done when a new stream
- * is created, such as sending headers or retrying requests.
+ * stream connection. {@link #onFlushPending(boolean)} to perform any work
that must be done when a
+ * new stream is created, such as sending headers or retrying requests.
*
- * <p>{@link #trySend(RequestT)} and {@link #startStream()} should not be
called when handling
+ * <p>{@link #trySend(Object)} and {@link #startStream()} should not be called
when handling
* responses; use {@link #executeSafely(Runnable)} instead.
*
* <p>Synchronization on this is used to synchronize the gRpc stream state and
internal data
diff --git
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java
index b027a6cac7b..90a479c8f85 100644
---
a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java
+++
b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java
@@ -28,13 +28,12 @@ import
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Precondit
import org.slf4j.Logger;
/**
- * Request observer that allows resetting its internal delegate using a {@link
- * #streamObserverFactory}.
+ * Request observer that allows resetting its internal delegate.
*
- * @implNote {@link StreamObserver}s generated by {@link
#streamObserverFactory} are expected to be
- * {@link ThreadSafe}. Has same methods declared in {@link
StreamObserver}, but they throw
- * {@link StreamClosedException} and {@link
WindmillStreamShutdownException}, which much be
- * handled by callers.
+ * @implNote {@link StreamObserver}s injected via {@link
#reset(TerminatingStreamObserver)} are
+ * expected to be {@link ThreadSafe}. Has same methods declared in {@link
StreamObserver}, but
+ * they throw {@link StreamClosedException} and {@link
WindmillStreamShutdownException}, which
must be handled by callers.
*/
@ThreadSafe
@Internal
@@ -49,9 +48,9 @@ final class ResettableThrowingStreamObserver<T> {
private boolean isPoisoned = false;
/**
- * Indicates that the current delegate is closed via {@link #poison() or
{@link #onCompleted()}}.
- * If not poisoned, a call to {@link #reset()} is required to perform future
operations on the
- * StreamObserver.
+ * Indicates that the current delegate is closed via {@link #poison()} or
{@link #onCompleted()}.
+ * If not poisoned, a call to {@link #reset(TerminatingStreamObserver)} is
required to perform
+ * future operations on the StreamObserver.
*/
@GuardedBy("this")
private boolean isCurrentStreamClosed = true;
diff --git
a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java
b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java
index 0e38abb1b78..30a49b2968b 100644
---
a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java
+++
b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java
@@ -486,7 +486,6 @@ public class ArtifactStagingService
/**
* Attempts to provide a reasonable filename for the artifact.
*
- * @param index a monotonically increasing index, which provides
uniqueness
* @param environment the environment id
* @param artifact the artifact itself
*/
diff --git
a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java
b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java
index 111d937fcbf..87551cfc03c 100644
---
a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java
+++
b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java
@@ -110,7 +110,7 @@ abstract class PrismExecutor {
/**
* Execute the {@link ProcessBuilder} that starts the Prism service.
Redirects output to the
- * {@param outputStream}.
+ * {@code outputStream}.
*/
void execute(OutputStream outputStream) throws IOException {
execute(createProcessBuilder().redirectErrorStream(true));
@@ -127,7 +127,7 @@ abstract class PrismExecutor {
/**
* Execute the {@link ProcessBuilder} that starts the Prism service.
Redirects output to the
- * {@param file}.
+ * {@code file}.
*/
void execute(File file) throws IOException {
execute(
diff --git
a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java
b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java
index 7508e505725..c4a43710e5e 100644
---
a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java
+++
b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java
@@ -32,7 +32,7 @@ class PrismPipelineResult implements PipelineResult {
private final Runnable cleanup;
/**
- * Instantiate the {@link PipelineResult} from the {@param delegate} and a
{@param cancel} to be
+ * Instantiate the {@link PipelineResult} from the {@code delegate} and a
{@code cancel} to be
* called when stopping the underlying executable Job management service.
*/
PrismPipelineResult(PipelineResult delegate, Runnable cancel) {
diff --git
a/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java
b/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java
index e55eb2cc34c..53b6968e111 100644
---
a/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java
+++
b/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java
@@ -40,13 +40,11 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * Implementation of BundleManager for non-portable mode. Keeps track of the
async function
- * completions.
+ * {@inheritDoc} Implementation of BundleManager for non-portable mode. Keeps
track of the async
+ * function completions.
*
* <p>This class is not thread safe and the current implementation relies on
the assumption that
* messages are dispatched to BundleManager in a single threaded mode.
- *
- * <p>{@inheritDoc}
*/
@SuppressWarnings({
"nullness" // TODO(https://github.com/apache/beam/issues/20497)
diff --git
a/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java
b/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java
index 8385be377a1..daf8451faac 100644
---
a/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java
+++
b/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java
@@ -174,10 +174,10 @@ public class EncoderHelpers {
/**
* Creates a Spark {@link Encoder} for {@link T} of {@link StructType} with
fields {@code value},
- * {@code timestamp}, {@code windows} and {@code pane}.
+ * {@code timestamp}, {@code window} and {@code pane}.
*
* @param value {@link Encoder} to encode field `{@code value}`.
- * @param window {@link Encoder} to encode individual windows in field
`{@code windows}`
+ * @param window {@link Encoder} to encode individual windows in field
`{@code window}`
*/
public static <T, W extends BoundedWindow> Encoder<WindowedValue<T>>
windowedValueEncoder(
Encoder<T> value, Encoder<W> window) {
diff --git
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java
index 03f7885f7e9..1488bed3231 100644
---
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java
+++
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java
@@ -88,7 +88,7 @@ public class GroupCombineFunctions {
/**
* Spark-level group by key operation that keeps original Beam {@link KV}
pairs unchanged.
*
- * @returns {@link JavaPairRDD} where the first value in the pair is the
serialized key, and the
+ * @return {@link JavaPairRDD} where the first value in the pair is the
serialized key, and the
* second is an iterable of the {@link KV} pairs with that key.
*/
static <K, V> JavaPairRDD<ByteArray, Iterable<WindowedValue<KV<K, V>>>>
groupByKeyPair(
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java
index 73caa7284e9..00dd2e367ce 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java
@@ -46,9 +46,9 @@ public abstract class FileSystem<ResourceIdT extends
ResourceId> {
* <p>Implementation should handle the following ambiguities of a
user-provided spec:
*
* <ol>
- * <li>{@code spec} could be a glob or a uri. {@link #match} should be
able to tell and choose
+ * <li>{@code specs} could be a glob or a uri. {@link #match} should be
able to tell and choose
* efficient implementations.
- * <li>The user-provided {@code spec} might refer to files or directories.
It is common that
+ * <li>The user-provided {@code specs} might refer to files or
directories. It is common that
* users that wish to indicate a directory will omit the trailing
{@code /}, such as in a
* spec of {@code "/tmp/dir"}. The {@link FileSystem} should be able
to recognize a
* directory with the trailing {@code /} omitted, but should always
return a correct {@link
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java
index 7e2940a2c35..155df53c6c2 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java
@@ -100,9 +100,9 @@ public class FileSystems {
* <p>Implementation handles the following ambiguities of a user-provided
spec:
*
* <ol>
- * <li>{@code spec} could be a glob or a uri. {@link #match} should be
able to tell and choose
+ * <li>{@code specs} could be a glob or a uri. {@link #match} should be
able to tell and choose
* efficient implementations.
- * <li>The user-provided {@code spec} might refer to files or directories.
It is common that
+ * <li>The user-provided {@code specs} might refer to files or
directories. It is common that
* users that wish to indicate a directory will omit the trailing path
delimiter, such as
* {@code "/tmp/dir"} in Linux. The {@link FileSystem} should be able
to recognize a
* directory with the trailing path delimiter omitted, but should
always return a correct
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java
index 3421bb4afc8..2a88dd0025d 100644
---
a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java
+++
b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java
@@ -164,12 +164,13 @@ public class MetricsEnvironment {
}
}
- /**
- * Set the {@link MetricsContainer} for the associated {@link
MetricsEnvironment}.
- *
- * @return The previous container for the associated {@link
MetricsEnvironment}.
- */
+ /** Set the {@link MetricsContainer} for the associated {@link
MetricsEnvironment}. */
public interface MetricsEnvironmentState {
+ /**
+ * Activates the given container.
+ *
+ * @return The previous container for the associated {@link
MetricsEnvironment}.
+ */
@Nullable
MetricsContainer activate(@Nullable MetricsContainer metricsContainer);
}
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java
index af19f8f33e5..104cbdeefb1 100644
---
a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java
+++
b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java
@@ -26,7 +26,7 @@ import org.apache.beam.sdk.schemas.Schema.FieldType;
*
* <p>Java transforms and JVM runners should take care when processing these
types as they may have
* a particular semantic meaning in the context that created them. For
example, consider an
- * enumerated type backed by a primitive {@class FieldType.INT8}. A Java
transform can clearly pass
+ * enumerated type backed by a primitive {@code FieldType.INT8}. A Java
transform can clearly pass
* through this value and pass it back to a context that understands it, but
that transform should
* not blindly perform arithmetic on this type.
*/
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
index 107f61d7b67..41beb93a5cb 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
@@ -1049,7 +1049,7 @@ public abstract class DoFn<InputT extends @Nullable
Object, OutputT extends @Nul
* RestrictionTracker.HasProgress} implementation within the {@link
RestrictionTracker} is an
* inaccurate representation of known work.
*
- * <p>It is up to each splittable {@DoFn} to convert between their natural
representation of
+ * <p>It is up to each splittable {@link DoFn} to convert between their
natural representation of
* outstanding work and this representation. For example:
*
* <ul>
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
index 159f92cd5e8..8bd2b23befe 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java
@@ -92,7 +92,7 @@ public class Flatten {
* {@code other} and then applying {@link #pCollections()}, but has the
advantage that it can be
* more easily used inline.
*
- * <p>Both {@cpde PCollections} must have equal {@link WindowFn}s. The
output elements of {@code
+ * <p>Both {@code PCollection}s must have equal {@link WindowFn}s. The
output elements of {@code
* Flatten<T>} are in the same windows and have the same timestamps as their
corresponding input
* elements. The output {@code PCollection} will have the same {@link
WindowFn} as both inputs.
*
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java
index d812d299a83..69667929dad 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java
@@ -142,16 +142,16 @@ public class JsonToRow {
*
* <p>Then access the parsed results via, {@link ParseResult#getResults()}
*
- * <p>{@Code PCollection<Row> personRows = results.getResults()}
+ * <p>{@code PCollection<Row> personRows = results.getResults()}
*
* <p>And access the failed to parse results via, {@link
ParseResult#getFailedToParseLines()}
*
- * <p>{@Code PCollection<Row> errorsLines = results.getFailedToParseLines()}
+ * <p>{@code PCollection<Row> errorsLines = results.getFailedToParseLines()}
*
* <p>This will produce a Row with Schema {@link
JsonToRowWithErrFn#ERROR_ROW_SCHEMA}
*
* <p>To access the reason for the failure you will need to first enable
extended error reporting.
- * {@link JsonToRowWithErrFn#withExtendedErrorInfo()} {@Code ParseResult
results =
+ * {@link JsonToRowWithErrFn#withExtendedErrorInfo()} {@code ParseResult
results =
*
jsonPersons.apply(JsonToRow.withExceptionReporting(PERSON_SCHEMA).withExtendedErrorInfo());
}
*
* <p>This will provide access to the reason for the Parse failure. The call
to {@link
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java
index cb6ee84c9aa..b93e6e288c7 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java
@@ -129,7 +129,6 @@ public class MapKeys<K1, K2, V> extends
PTransform<PCollection<KV<K1, V>>, PColl
* .<String, String>via(word -> 1 / word.length) // Could
throw ArithmeticException
* .exceptionsVia(
* new InferableFunction<ExceptionElement<KV<String,
String>>, String>() {
- * @Override
* public String apply(ExceptionElement<KV<String,
String>> input) {
* return input.exception().getMessage();
* }
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java
index 35f71d69010..8f254642f08 100644
---
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java
+++
b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java
@@ -741,7 +741,7 @@ public abstract class DoFnSignature {
}
/**
- * Descriptor for a (@link Parameter} of type {@link DoFn.Element} where
the type does not match
+ * Descriptor for a {@link Parameter} of type {@link DoFn.Element} where
the type does not match
* the DoFn's input type. This implies that the input must have a schema
that is compatible.
*/
@AutoValue
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java
index 513772e8ec3..0398f4d63ae 100644
---
a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java
+++
b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java
@@ -27,8 +27,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.beam.sdk.schemas.Schema;
-import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.values.Row;
import org.apache.beam.sdk.values.ValueInSingleWindow;
import
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.MoreObjects;
@@ -60,9 +58,9 @@ import org.joda.time.Instant;
* // output --> "unified batch and streaming!"
* }</pre>
*
- * <p>Additionally, {@link #interpolate(Row, BoundedWindow, PaneInfo,
Instant)} can be used in
- * streaming scenarios to substitute windowing metadata into the template
String. To make use of
- * this, use the relevant placeholder:
+ * <p>Additionally, {@link #interpolate(ValueInSingleWindow)} can be used in
streaming scenarios to
+ * substitute windowing metadata into the template String. To make use of
this, use the relevant
+ * placeholder:
*
* <ul>
* <li>$WINDOW: the window's string representation
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java
index 4268c6c7067..c2b12255e42 100644
---
a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java
+++
b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java
@@ -466,8 +466,7 @@ public class TransformUpgrader implements AutoCloseable {
* A utility method that converts a byte array obtained by invoking {@link
#toByteArray(Object)}
* back to a Java object.
*
- * @param bytes a {@code byte} array generated by invoking the {@link
#toByteArray(Object)}
- * method.
+ * @param bytes an array of bytes generated by invoking the {@link
#toByteArray(Object)} method.
* @return re-generated object.
*/
public static Object fromByteArray(byte[] bytes) throws
InvalidClassException {
diff --git
a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java
b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java
index 8510d8dd0c1..9126ab77a88 100644
---
a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java
+++
b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java
@@ -24,7 +24,7 @@ import org.apache.beam.sdk.coders.AtomicCoder;
import org.apache.beam.sdk.coders.CoderException;
/**
- * Represents a {@coder Coder} that is not defined in Java SDK, for example, a
coder that is
+ * Represents a {@code Coder} that is not defined in Java SDK, for example, a
coder that is
* available in an external SDK that cannot be fully interpretted in the Java
SDK.
*/
public class UnknownCoderWrapper extends AtomicCoder<Object> {
diff --git
a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
index 1f3ec0f427b..9925c365367 100644
---
a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
+++
b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java
@@ -146,7 +146,8 @@ public class SerializableUtilsTest {
}
/**
- * a sample class to test framework serialization, {@see
SerializableUtilsTest#customClassLoader}.
+ * a sample class to test framework serialization, {@link
+ * SerializableUtilsTest#customClassLoader}.
*/
public static class Foo implements Serializable {}
}
diff --git
a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java
b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java
index c3c3ccfd326..ae658d93955 100644
---
a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java
+++
b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java
@@ -440,7 +440,7 @@ public class ExpansionService extends
ExpansionServiceGrpc.ExpansionServiceImplB
*
* <p>If no Schema is registered, {@link ConfigT} must have a zero-argument
constructor and
* setters corresponding to each field in the row encoded by {@code
payload}. Note {@link ConfigT}
- * may have additional setters not represented in the {@ocde payload} schema.
+ * may have additional setters not represented in the {@code payload} schema.
*
* <p>Exposed for testing only. No backwards compatibility guarantees.
*/
diff --git
a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java
b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java
index 396fde45298..e3f01dd8529 100644
---
a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java
+++
b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java
@@ -118,7 +118,7 @@ public class GcsUtil {
return delegate.fileSize(path);
}
- /** @deprecated use {@link #getBlob(GcsPath)}. */
+ /** @deprecated use {@link #getBlob(GcsPath, BlobGetOption...)}. */
@Deprecated
public StorageObject getObject(GcsPath gcsPath) throws IOException {
return delegate.getObject(gcsPath);
diff --git
a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java
b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java
index 058c01f019d..832dc5e427c 100644
---
a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java
+++
b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java
@@ -38,7 +38,7 @@ import org.joda.time.Duration;
*
* <p>There are two types of processing - when the sequence numbers are
contiguous per key and these
* sequences per keys are independent of each other, and when there is a
global sequence shared by
- * all keys. In case of the global sequence processing the custom handler must
extend from {@see
+ * all keys. In case of the global sequence processing the custom handler must
extend from {@link
* OrderedProcessingGlobalSequenceHandler}.
*
* @param <EventT> type of events to be processed
diff --git
a/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java
b/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java
index cdf222fadf5..a16f32c9919 100644
---
a/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java
+++
b/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java
@@ -115,7 +115,7 @@ public class OrderedEventProcessorTestBase {
/**
* The majority of the tests use this method. Testing is done in the global
window.
*
- * @throws @UnknownKeyFor @NonNull @Initialized CannotProvideCoderException
+ * @throws CannotProvideCoderException if a required coder cannot be inferred for the test types
*/
protected void doTest(
Event[] events,
diff --git
a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java
b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java
index 808449de5d5..858dbcd5bf7 100644
---
a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java
+++
b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java
@@ -31,8 +31,8 @@ import org.checkerframework.checker.nullness.qual.Nullable;
* <p>Implementations should have a way of determining which catalog is
currently active, and
* produce it when {@link #currentCatalog()} is invoked.
*
- * <p>When {@link #registerTableProvider(String, TableProvider)} is called,
the provider should
- * become available for all catalogs.
+ * <p>When {@link #registerTableProvider(TableProvider)} is called, the
provider should become
+ * available for all catalogs.
*/
@Internal
public interface CatalogManager {
diff --git
a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
index ed717b7f6e9..ca780440863 100644
---
a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
+++
b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
@@ -93,7 +93,7 @@ public class TestUtils {
* )
* }</pre>
*
- * @args pairs of column type and column names.
+ * @param args pairs of column type and column names.
*/
public static RowsBuilder of(final Object... args) {
Schema beamSQLSchema = TestTableUtils.buildBeamSqlSchema(args);
diff --git
a/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java
b/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java
index 9fd40852184..31b45bbbf4d 100644
---
a/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java
+++
b/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java
@@ -83,7 +83,7 @@ import org.joda.time.Instant;
*
* <p>By default, the latest element from the previous bucket is propagated
into missing buckets. The user can override
* this using the {@link #withMergeFunction} method. Several built-in merge
functions are provided for -
- * {@link #keepLatest()} (the default), {@link #keepEarliest()}, an {@link
#keepNull()}.
+ * {@link #keepLatest()} (the default), {@link #keepEarliest()}, and {@code
keepNull()}.
*
* <p>Sometimes elements need to be modified before being propagated into a
missing bucket. For example, consider the
* following element type containing a timestamp:
diff --git
a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java
b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java
index fac83485d22..528864d9e90 100644
---
a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java
+++
b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java
@@ -74,7 +74,7 @@ import org.joda.time.Instant;
* Double>>}
* </ul>
*
- * <p>In addition to this, it passes {@Code OutputReceiver<RestrictionT>} to
the DoFn GetRestriction
+ * <p>In addition to this, it passes {@code OutputReceiver<RestrictionT>} to
the DoFn GetRestriction
* method.
*/
@Internal
diff --git
a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java
b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java
index 95a65fe1165..74319151e3d 100644
---
a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java
+++
b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java
@@ -286,8 +286,8 @@ public abstract class AsyncBatchWriteHandler<RecT, ResT> {
* already been attempted by the AWS SDK in that case.
* </ul>
*
- * The next call of {@link #checkForAsyncFailure()}, {@link
#batchWrite(String, List< RecT >)}} or
- * {@link #waitForCompletion()} will check for the last async failure and
throw it. Afterwards the
+ * The next call of {@link #checkForAsyncFailure()}, {@link
#batchWrite(String, List)} or {@link
+ * #waitForCompletion()} will check for the last async failure and throw it.
Afterwards the
* failure state is reset.
*/
private class RetryHandler implements BiConsumer<List<ResT>, Throwable> {
diff --git
a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java
b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java
index 2de4a47ebae..cca6a452a0e 100644
---
a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java
+++
b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java
@@ -490,8 +490,8 @@ public final class KinesisIO {
/**
* Specifies the {@code WatermarkPolicyFactory} as
ArrivalTimeWatermarkPolicyFactory.
*
- * <p>{@param watermarkIdleDurationThreshold} Denotes the duration for
which the watermark can
- * be idle.
+ * <p>{@code watermarkIdleDurationThreshold} Denotes the duration for
which the watermark can be
+ * idle.
*/
public Read withArrivalTimeWatermarkPolicy(Duration
watermarkIdleDurationThreshold) {
return toBuilder()
diff --git
a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java
b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java
index b073e275be3..66236622812 100644
---
a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java
+++
b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java
@@ -29,7 +29,7 @@ import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-/** Class for building {@link PluginConfig} object of the specific class
{@param <T>}. */
+/** Class for building {@link PluginConfig} object of the specific class
{@code <T>}. */
public class ConfigWrapper<T extends PluginConfig> {
private static final Logger LOG =
LoggerFactory.getLogger(ConfigWrapper.class);
diff --git
a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java
b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java
index ced11201019..bc54d154fb0 100644
---
a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java
+++
b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java
@@ -41,9 +41,9 @@ public class PluginConfigInstantiationUtils {
private static final String MACRO_FIELDS_FIELD_NAME = "macroFields";
/**
- * Method for instantiating {@link PluginConfig} object of specific class
{@param configClass}.
+ * Method for instantiating {@link PluginConfig} object of specific class
{@code configClass}.
* After instantiating, it will go over all {@link Field}s with the {@link
Name} annotation and
- * set the appropriate parameter values from the {@param params} map for
them.
+ * set the appropriate parameter values from the {@code params} map for them.
*
* @param params map of config fields, where key is the name of the field,
value must be String or
* boxed primitive
diff --git
a/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java
b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java
index d4955ac43b4..bd666dd7cf2 100644
--- a/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java
+++ b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java
@@ -72,7 +72,7 @@ import org.testcontainers.utility.DockerImageName;
/**
* IO Integration test for {@link org.apache.beam.sdk.io.cdap.CdapIO}.
*
- * <p>{@see
https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests}
for
+ * <p>See <a
+ * href="https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests">Beam
+ * I/O transform integration tests</a> for
* more details.
*/
@RunWith(JUnit4.class)
diff --git
a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
index f3d6122f2a0..1cebd404bab 100644
---
a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
+++
b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
@@ -533,7 +533,7 @@ public class ElasticsearchIO {
/**
* If Elasticsearch authentication is enabled, provide an API key. Be
aware that you can only
- * use one of {@Code withApiToken()}, {@code withBearerToken()} and {@code
withDefaultHeaders}
+ * use one of {@code withApiToken()}, {@code withBearerToken()} and {@code
withDefaultHeaders}
* at the same time, as they (potentially) use the same header.
*
* @param apiKey the API key used to authenticate to Elasticsearch
@@ -549,7 +549,7 @@ public class ElasticsearchIO {
/**
* If Elasticsearch authentication is enabled, provide a bearer token. Be
aware that you can
- * only use one of {@Code withApiToken()}, {@code withBearerToken()} and
{@code
+ * only use one of {@code withApiToken()}, {@code withBearerToken()} and
{@code
* withDefaultHeaders} at the same time, as they (potentially) use the
same header.
*
* @param bearerToken the bearer token used to authenticate to
Elasticsearch
@@ -1793,7 +1793,7 @@ public class ElasticsearchIO {
* Providing this hint means there is no need for setting {@link
* DocToBulk#withConnectionConfiguration}. This can also be very useful
for testing purposes.
*
- * <p>Note: if the value of @param backendVersion differs from the version
the destination
+ * <p>Note: if the value of {@code backendVersion} differs from the
version the destination
* cluster is running, behavior is undefined and likely to yield errors.
*
* @param backendVersion the major version number of the version of
Elasticsearch being run in
@@ -2486,7 +2486,7 @@ public class ElasticsearchIO {
/**
* Provide a set of textual error types which can be contained in Bulk API
response
- * items[].error.type field. Any element in @param
allowableResponseErrorTypes will suppress
+ * items[].error.type field. Any element in {@code
allowableResponseErrorTypes} will suppress
* errors of the same type in Bulk responses.
*
* <p>See also
@@ -2543,7 +2543,7 @@ public class ElasticsearchIO {
* batches are maintained per-key-per-window. BE AWARE that low values for
@param
* maxParallelRequests, in particular if the input data has a finite
number of windows, can
* reduce parallelism greatly. Because data will be temporarily globally
windowed as part of
- * writing data to Elasticsearch, if @param maxParallelRequests is set to
1, there will only
+ * writing data to Elasticsearch, if {@code maxParallelRequests} is set to
1, there will only
* ever be 1 request in flight. Having only a single request in flight can
be beneficial for
* ensuring an Elasticsearch cluster is not overwhelmed by parallel
requests, but may not work
* for all use cases. If this number is less than the number of maximum
workers in your
@@ -2566,7 +2566,7 @@ public class ElasticsearchIO {
* batches are maintained per-key-per-window. BE AWARE that low values for
@param
* maxParallelRequests, in particular if the input data has a finite
number of windows, can
* reduce parallelism greatly. Because data will be temporarily globally
windowed as part of
- * writing data to Elasticsearch, if @param maxParallelRequests is set to
1, there will only
+ * writing data to Elasticsearch, if {@code maxParallelRequests} is set to
1, there will only
* ever be 1 request in flight. Having only a single request in flight can
be beneficial for
* ensuring an Elasticsearch cluster is not overwhelmed by parallel
requests, but may not work
* for all use cases. If this number is less than the number of maximum
workers in your
diff --git
a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java
b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java
index d01e0051c7b..54c885f5415 100644
---
a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java
+++
b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java
@@ -57,7 +57,7 @@ import org.junit.rules.TestName;
public abstract class FileReadSchemaTransformFormatProviderTest {
- /** Returns the format of the {@linke
FileReadSchemaTransformFormatProviderTest} subclass. */
+ /** Returns the format of the {@link
FileReadSchemaTransformFormatProviderTest} subclass. */
protected abstract String getFormat();
/** Given a Beam Schema, returns the relevant source's String schema
representation. */
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java
index 35751e2758e..4cf8f9c73ba 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java
@@ -267,7 +267,7 @@ public class AvroGenericRecordToStorageApiProto {
}
/**
- * Forwards {@param changeSequenceNum} to {@link
#messageFromGenericRecord(Descriptor,
+ * Forwards {@code changeSequenceNum} to {@link
#messageFromGenericRecord(Descriptor,
* GenericRecord, String, String)} via {@link Long#toHexString}.
*/
public static DynamicMessage messageFromGenericRecord(
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java
index a4d20707304..36e1d77b67b 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java
@@ -146,7 +146,7 @@ public class BeamRowToStorageApiProto {
.build();
/**
- * Forwards (@param changeSequenceNum) to {@link
#messageFromBeamRow(Descriptor, Row, String,
+ * Forwards ({@code changeSequenceNum}) to {@link
#messageFromBeamRow(Descriptor, Row, String,
* String)} via {@link Long#toHexString}.
*/
public static DynamicMessage messageFromBeamRow(
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java
index 2c6704fb654..74032c36438 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java
@@ -449,7 +449,7 @@ public class BigQueryUtils {
* <p>Supports both standard and legacy SQL types.
*
* @param schema Schema of the type returned
- * @param nestedFields Nested fields for the given type (eg. RECORD type)
+ * @param options Options for schema conversion
* @return Corresponding Beam {@link FieldType}
*/
private static FieldType fromTableFieldSchemaType(
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java
index 18905d149b9..c7001c622fc 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java
@@ -57,10 +57,10 @@ public abstract class RowMutationInformation {
public abstract String getChangeSequenceNumber();
/**
- * Instantiate {@link RowMutationInformation} with {@link MutationType} and
the {@param
+ * Instantiate {@link RowMutationInformation} with {@link MutationType} and
the {@code
* sequenceNumber}. @deprecated - instantiates {@link
RowMutationInformation} via {@link
- * #of(MutationType, String)} forwarding the {@param sequenceNumber} value
using {@link
- * Long#toHexString(long)}. {@param sequenceNumber} values {@code < 0} will
throw an error.
+ * #of(MutationType, String)} forwarding the {@code sequenceNumber} value
using {@link
+ * Long#toHexString(long)}. {@code sequenceNumber} values {@code < 0} will
throw an error.
*/
@Deprecated
public static RowMutationInformation of(MutationType mutationType, long
sequenceNumber) {
@@ -70,11 +70,11 @@ public abstract class RowMutationInformation {
}
/**
- * Instantiate {@link RowMutationInformation} with {@link MutationType} and
the {@param
+ * Instantiate {@link RowMutationInformation} with {@link MutationType} and
the {@code
* changeSequenceNumber}, which sets the BigQuery API {@code
_CHANGE_SEQUENCE_NUMBER} pseudo
* column, enabling custom user-supplied ordering of {@link RowMutation}s.
*
- * <p>Requirements for the {@param changeSequenceNumber}:
+ * <p>Requirements for the {@code changeSequenceNumber}:
*
* <ul>
* <li>fixed format {@code String} in hexadecimal format
@@ -87,12 +87,12 @@ public abstract class RowMutationInformation {
* FFFFFFFFFFFFFFFF/FFFFFFFFFFFFFFFF/FFFFFFFFFFFFFFFF/FFFFFFFFFFFFFFFF}
* </ul>
*
- * <p>Below are some {@param changeSequenceNumber} scenarios:
+ * <p>Below are some {@code changeSequenceNumber} scenarios:
*
* <table>
* <tr>
- * <th>Record #1: {@param changeSequenceNumber}</th>
- * <th>Record #2: {@param changeSequenceNumber}</th>
+ * <th>Record #1: {@code changeSequenceNumber}</th>
+ * <th>Record #2: {@code changeSequenceNumber}</th>
* <th>BigQuery API compares as</th>
* </tr>
* <tr>
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java
index 640de93bd08..646dfdd873f 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java
@@ -819,7 +819,7 @@ public class TableRowToStorageApiProto {
}
/**
- * Forwards {@param changeSequenceNum} to {@link
#messageFromTableRow(SchemaInformation,
+ * Forwards {@code changeSequenceNum} to {@link
#messageFromTableRow(SchemaInformation,
* Descriptor, TableRow, boolean, boolean, TableRow, String, String)} via
{@link
* Long#toHexString}.
*/
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java
index bd01604643e..3e2206d59c0 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java
@@ -101,7 +101,7 @@ public abstract class PubsubClient implements Closeable {
/**
* Return the timestamp (in ms since unix epoch) to use for a Pubsub message
with {@code
- * timestampAttribute} and {@code attriutes}.
+ * timestampAttribute} and {@code attributes}.
*
* <p>The message attributes must contain {@code timestampAttribute}, and
the value of that
* attribute will be taken as the timestamp.
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java
index 58849fd3f3b..da3ee89de0d 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java
@@ -120,7 +120,7 @@ abstract class PubsubRowToMessage extends
PTransform<PCollection<Row>, PCollecti
/**
* As a convenience method, generates {@link InputSchemaFactory} for
expected {@link Schema} for
* {@link Row} input into {@link PubsubRowToMessage}. The {@link Field} for
{@link
- * #getPayloadKeyName()} is excluded for null {@param payloadFieldType}. See
{@link
+ * #getPayloadKeyName()} is excluded for null {@code payloadFieldType}. See
{@link
* InputSchemaFactory#buildSchema(Field...)} for details on how to add
additional fields.
*/
InputSchemaFactory inputSchemaFactory(@Nullable FieldType payloadFieldType) {
@@ -308,7 +308,7 @@ abstract class PubsubRowToMessage extends
PTransform<PCollection<Row>, PCollecti
this.schema = schema;
}
- /** Returns true of all {@param fieldMatchers} {@link
FieldMatcher#match(Schema)}. */
+ /** Returns true if all {@code fieldMatchers} {@link
FieldMatcher#match(Schema)}. */
boolean matchesAll(FieldMatcher... fieldMatchers) {
for (FieldMatcher fieldMatcher : fieldMatchers) {
if (!fieldMatcher.match(schema)) {
@@ -318,7 +318,7 @@ abstract class PubsubRowToMessage extends
PTransform<PCollection<Row>, PCollecti
return true;
}
- /** Returns true of any {@param fieldMatchers} {@link
FieldMatcher#match(Schema)}. */
+ /** Returns true if any {@code fieldMatchers} {@link
FieldMatcher#match(Schema)}. */
boolean matchesAny(FieldMatcher... fieldMatchers) {
for (FieldMatcher fieldMatcher : fieldMatchers) {
if (fieldMatcher.match(schema)) {
@@ -558,7 +558,7 @@ abstract class PubsubRowToMessage extends
PTransform<PCollection<Row>, PCollecti
/**
* Builds a {@link Schema} from {@link #getAttributesField()} and {@link
#getTimestampField()}
- * and {@param additionalFields}. Users are encouraged to use the {@link
#removeFields(Schema,
+ * and {@code additionalFields}. Users are encouraged to use the {@link
#removeFields(Schema,
* String...)} method to customize the resulting {@link Schema}.
*/
Schema buildSchema(Field... additionalFields) {
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java
index b9a554d54ad..bc91e9f381d 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java
@@ -581,7 +581,7 @@ public class PubsubUnboundedSource extends
PTransform<PBegin, PCollection<Pubsub
/**
* BLOCKING NACK (ie request deadline extension of 0) receipt of messages
from Pubsub with the
- * given {@code ockIds}. Does not retain {@code ackIds}.
+ * given {@code ackIds}. Does not retain {@code ackIds}.
*/
public void nackBatch(long nowMsSinceEpoch, List<String> ackIds) throws
IOException {
pubsubClient.get().modifyAckDeadline(subscription, ackIds, 0);
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java
index 11220b69b7b..6085431396d 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java
@@ -45,7 +45,7 @@ public class ReadSpannerSchema extends DoFn<Void,
SpannerSchema> {
private transient SpannerAccessor spannerAccessor;
/**
- * Constructor for creating an instance of the ReadSpannerSchema class. If
no {@param
+ * Constructor for creating an instance of the ReadSpannerSchema class. If
no {@code
* allowedTableNames} is passed, every single table is allowed.
*
* @param config The SpannerConfig object that contains the configuration
for accessing the
diff --git
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java
index 69e89e74a38..e81d8aef473 100644
---
a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java
+++
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java
@@ -104,9 +104,9 @@ public class QueryChangeStreamAction {
* @param dataChangeRecordAction action class to process {@link
DataChangeRecord}s
* @param heartbeatRecordAction action class to process {@link
HeartbeatRecord}s
* @param childPartitionsRecordAction action class to process {@link
ChildPartitionsRecord}s
- * @param PartitionStartRecordAction action class to process {@link
PartitionStartRecord}s
- * @param PartitionEndRecordAction action class to process {@link
PartitionEndRecord}s
- * @param PartitionEventRecordAction action class to process {@link
PartitionEventRecord}s
+ * @param partitionStartRecordAction action class to process {@link
PartitionStartRecord}s
+ * @param partitionEndRecordAction action class to process {@link
PartitionEndRecord}s
+ * @param partitionEventRecordAction action class to process {@link
PartitionEventRecord}s
* @param metrics metrics gathering class
* @param isMutableChangeStream whether the change stream is mutable or not
*/
diff --git
a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java
index 624683e4756..3a467de3804 100644
---
a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java
+++
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java
@@ -1932,7 +1932,7 @@ public class BigQueryIOWriteTest implements Serializable {
}
}
- /** Coder for @link{PartitionedGlobalWindow}. */
+ /** Coder for {@link PartitionedGlobalWindow}. */
private static class PartitionedGlobalWindowCoder extends
AtomicCoder<PartitionedGlobalWindow> {
@Override
public void encode(PartitionedGlobalWindow window, OutputStream outStream)
throws IOException {
diff --git
a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java
b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java
index 6d66cee2110..673463cf5a7 100644
---
a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java
+++
b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java
@@ -92,7 +92,7 @@ class HBaseRowMutationsCoder extends
StructuredCoder<RowMutations> implements Se
* which is asserted equal in this coder 2. Canonical encoding is maintained
regardless of object
* machine or time context
*
- * @throws @UnknownKeyFor@NonNull@Initialized NonDeterministicException
+ * @throws NonDeterministicException
*/
@Override
public void verifyDeterministic() {}
diff --git
a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java
b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java
index 6cc3e6f3b9a..6cfece500ad 100644
--- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java
+++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java
@@ -455,12 +455,10 @@ public class JdbcUtil {
return calendar;
}
- /**
- * Returns a {@code JdbcReadPartitionsHelper} instance associated with the
given {@param type}.
- */
+ /** Returns a {@code JdbcReadWithPartitionsHelper} instance associated with
the given {@code type}. */
static <T> @Nullable JdbcReadWithPartitionsHelper<T>
getPartitionsHelper(TypeDescriptor<T> type) {
// This cast is unchecked, thus this is a small type-checking risk. We
just need
- // to make sure that all preset helpers in `JdbcUtil.PRESET_HELPERS` are
matched
+ // to make sure that all preset helpers in {@code PRESET_HELPERS} are
matched
// in type from their Key and their Value.
return (JdbcReadWithPartitionsHelper<T>)
PRESET_HELPERS.get(type.getRawType());
}
diff --git
a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java
b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java
index 5f53d324300..2c712b39752 100644
--- a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java
+++ b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java
@@ -416,7 +416,7 @@ public class JdbcIOIT {
}
/**
- * @return {@link JdbcIO.Write} transform that writes to {@param tableName}
Postgres table and
+ * @return {@link JdbcIO.Write} transform that writes to {@code tableName}
Postgres table and
* returns all fields of modified rows.
*/
private static JdbcIO.Write<KV<Integer, String>>
getJdbcWriteWithReturning(String tableName) {
diff --git
a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java
b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java
index 68ab866277f..a8d290237b3 100644
---
a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java
+++
b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java
@@ -116,7 +116,7 @@ import org.slf4j.LoggerFactory;
* and will only return a non-null {@link
org.apache.beam.sdk.transforms.splittabledofn.SplitResult}
* for a checkpoint. To the extent possible in the SDK, this reduces the risk
of overwriting
* committed offsets when {@code enable.auto.commit} is set and prevents
concurrent use of
- * per-{@TopicPartition} cached {@link Consumer} resources.
+ * per-{@link TopicPartition} cached {@link Consumer} resources.
*
* <p>TODO(https://github.com/apache/beam/issues/20280): Add support for
initial splitting.
*
diff --git
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java
index b1133eadb1c..b7426c336d6 100644
---
a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java
+++
b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java
@@ -128,7 +128,7 @@ import org.testcontainers.utility.DockerImageName;
/**
* IO Integration test for {@link org.apache.beam.sdk.io.kafka.KafkaIO}.
*
- * <p>{@see
https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests}
for
+ * <p>See <a
href="https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests">https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests</a>
for
* more details.
*
* <p>NOTE: This test sets retention policy of the messages so that all
messages are retained in the
diff --git
a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java
b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java
index 669cc3c43f2..73008a015c4 100644
---
a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java
+++
b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java
@@ -52,7 +52,7 @@ class ExchangeTestPlan {
* @param read Read semantics to use for a test
* @param maxRecordsRead Maximum messages to be processed by Beam within a
test
* @param numRecordsToPublish Number of messages that will be published to
the exchange as part of
- * a test. Note that this will frequently be the same value as {@code
numRecordsRead} in which
+ * a test. Note that this will frequently be the same value as {@code
maxRecordsRead} in which
* case it's simpler to use {@link #ExchangeTestPlan(RabbitMqIO.Read,
int)}, but when testing
* topic exchanges or exchanges where not all messages will be routed to
the queue being read
* from, these numbers will differ.
diff --git
a/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java
b/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java
index b8057d87968..db5ae34d737 100644
---
a/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java
+++
b/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java
@@ -321,8 +321,8 @@ public class SingleStoreIO {
* <p>NOTE - The "user" and "password" properties can be add via {@link
#withUsername(String)},
* {@link #withPassword(String)}, so they do not need to be included here.
*
- * <p>Full list of supported properties can be found here {@link <a
- *
href="https://docs.singlestore.com/managed-service/en/developer-resources/connect-with-application-development-tools/connect-with-java-jdbc/the-singlestore-jdbc-driver.html#connection-string-parameters">...</a>}
+ * <p>Full list of supported properties can be found here <a
+ *
href="https://docs.singlestore.com/managed-service/en/developer-resources/connect-with-application-development-tools/connect-with-java-jdbc/the-singlestore-jdbc-driver.html#connection-string-parameters">...</a>
*/
public DataSourceConfiguration withConnectionProperties(String
connectionProperties) {
checkNotNull(connectionProperties, "connectionProperties can not be
null");
diff --git
a/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java
b/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java
index 32258934d0d..77e20ef49b9 100644
---
a/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java
+++
b/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java
@@ -77,8 +77,9 @@ import org.testcontainers.utility.DockerImageName;
/**
* IO Integration test for {@link
org.apache.beam.sdk.io.sparkreceiver.SparkReceiverIO}.
*
- * <p>{@see
https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests}
for
- * more details.
+ * <p>See <a
+ *
href="https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests">https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests</a>
+ * for more details.
*
* <p>NOTE: This test sets retention policy of the messages so that all
messages are retained in the
* topic so that we could read them back after writing.