From 14fe7dc9a922c79d97787ae3e4cbb8461d0b126f Mon Sep 17 00:00:00 2001 From: Radek Stankiewicz Date: Wed, 4 Mar 2026 21:44:45 +0100 Subject: [PATCH 1/7] Fix InvalidInlineTag, InvalidParam, InvalidBlockTag and InvalidLink javadocs --- .../beam/examples/webapis/ImageRequest.java | 2 +- .../org/apache/beam/it/gcp/LoadTestBase.java | 4 ++-- .../beam/it/gcp/storage/FileBasedIOLT.java | 2 +- .../beam/runners/core/StatefulDoFnRunner.java | 3 +-- .../runners/direct/DirectTimerInternals.java | 4 ++-- .../runners/flink/streaming/StreamSources.java | 2 +- .../wrappers/streaming/DoFnOperator.java | 4 ++-- .../wrappers/streaming/DoFnOperator.java | 4 ++-- .../runners/flink/streaming/StreamSources.java | 2 +- .../wrappers/streaming/DoFnOperator.java | 4 ++-- .../worker/DataflowElementExecutionTracker.java | 8 ++++---- .../worker/DataflowExecutionContext.java | 4 ++-- ...tricsToPerStepNamespaceMetricsConverter.java | 4 ++-- .../runners/dataflow/worker/WorkUnitClient.java | 2 +- .../windmill/client/AbstractWindmillStream.java | 4 ++-- .../ResettableThrowingStreamObserver.java | 17 ++++++++--------- .../artifact/ArtifactStagingService.java | 1 - .../beam/runners/prism/PrismExecutor.java | 4 ++-- .../beam/runners/prism/PrismPipelineResult.java | 2 +- .../samza/runtime/ClassicBundleManager.java | 6 ++---- .../translation/helpers/EncoderHelpers.java | 4 ++-- .../translation/GroupCombineFunctions.java | 2 +- .../java/org/apache/beam/sdk/io/FileSystem.java | 4 ++-- .../org/apache/beam/sdk/io/FileSystems.java | 4 ++-- .../logicaltypes/UnknownLogicalType.java | 2 +- .../org/apache/beam/sdk/transforms/DoFn.java | 2 +- .../org/apache/beam/sdk/transforms/Flatten.java | 2 +- .../apache/beam/sdk/transforms/JsonToRow.java | 6 +++--- .../sdk/transforms/reflect/DoFnSignature.java | 2 +- .../beam/sdk/util/RowStringInterpolator.java | 8 +++----- .../util/construction/TransformUpgrader.java | 3 +-- .../util/construction/UnknownCoderWrapper.java | 2 +- .../beam/sdk/util/SerializableUtilsTest.java | 3 
++- .../sdk/expansion/service/ExpansionService.java | 2 +- .../beam/sdk/extensions/gcp/util/GcsUtil.java | 2 +- .../ordered/OrderedProcessingHandler.java | 2 +- .../ordered/OrderedEventProcessorTestBase.java | 2 +- .../sql/meta/catalog/CatalogManager.java | 4 ++-- .../beam/sdk/extensions/sql/TestUtils.java | 2 +- .../sdk/extensions/timeseries/FillGaps.java | 2 +- ...tableSplitAndSizeRestrictionsDoFnRunner.java | 2 +- .../beam/sdk/io/aws2/kinesis/KinesisIO.java | 4 ++-- .../apache/beam/sdk/io/cdap/ConfigWrapper.java | 2 +- .../io/cdap/PluginConfigInstantiationUtils.java | 4 ++-- .../org/apache/beam/sdk/io/cdap/CdapIOIT.java | 2 +- .../sdk/io/elasticsearch/ElasticsearchIO.java | 12 ++++++------ ...leReadSchemaTransformFormatProviderTest.java | 2 +- .../AvroGenericRecordToStorageApiProto.java | 2 +- .../gcp/bigquery/BeamRowToStorageApiProto.java | 2 +- .../beam/sdk/io/gcp/bigquery/BigQueryUtils.java | 2 +- .../io/gcp/bigquery/RowMutationInformation.java | 16 ++++++++-------- .../gcp/bigquery/TableRowToStorageApiProto.java | 2 +- .../beam/sdk/io/gcp/pubsub/PubsubClient.java | 2 +- .../sdk/io/gcp/pubsub/PubsubRowToMessage.java | 8 ++++---- .../io/gcp/pubsub/PubsubUnboundedSource.java | 2 +- .../sdk/io/gcp/spanner/ReadSpannerSchema.java | 2 +- .../action/QueryChangeStreamAction.java | 6 +++--- .../io/gcp/bigquery/BigQueryIOWriteTest.java | 2 +- .../sdk/io/hbase/HBaseRowMutationsCoder.java | 2 +- .../org/apache/beam/sdk/io/jdbc/JdbcIOIT.java | 2 +- .../beam/sdk/io/kafka/ReadFromKafkaDoFn.java | 2 +- .../org/apache/beam/sdk/io/kafka/KafkaIOIT.java | 2 +- .../beam/sdk/io/rabbitmq/ExchangeTestPlan.java | 2 +- .../beam/sdk/io/singlestore/SingleStoreIO.java | 4 ++-- .../sdk/io/sparkreceiver/SparkReceiverIOIT.java | 5 +++-- 65 files changed, 115 insertions(+), 121 deletions(-) diff --git a/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java b/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java index 
41601c77f070..63a68a334196 100644 --- a/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java +++ b/examples/java/webapis/src/main/java/org/apache/beam/examples/webapis/ImageRequest.java @@ -54,7 +54,7 @@ static Builder builder() { return new AutoValue_ImageRequest.Builder(); } - /** Build an {@link ImageRequest} from a {@param url}. */ + /** Build an {@link ImageRequest} from a {@code url}. */ static ImageRequest of(String url) { return builder().setImageUrl(url).setMimeType(mimeTypeOf(url)).build(); } diff --git a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java index 51a4dc185dd3..cd9ef52ed835 100644 --- a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java +++ b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/LoadTestBase.java @@ -211,7 +211,7 @@ protected boolean waitForNumMessages(String jobId, String pcollection, Long expe * * @param metrics a map of raw metrics. The results are also appened in the map. * @param launchInfo Job info of the job - * @param config a {@class MetricsConfiguration} + * @param config a {@link MetricsConfiguration} */ private void computeDataflowMetrics( Map metrics, LaunchInfo launchInfo, MetricsConfiguration config) @@ -365,7 +365,7 @@ protected Map getCpuUtilizationMetrics(String jobId, TimeInterva * Computes throughput metrics of the given pcollection in dataflow job. 
* * @param jobInfo dataflow job LaunchInfo - * @param config the {@class MetricsConfiguration} + * @param config the {@link MetricsConfiguration} * @param timeInterval interval for the monitoring query * @return throughput metrics of the pcollection */ diff --git a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java index 82caea312189..cdcc28e326cd 100644 --- a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java +++ b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/storage/FileBasedIOLT.java @@ -251,7 +251,7 @@ static class Configuration extends SyntheticSourceOptions { /** Number of dynamic destinations to write. */ @JsonProperty public int numShards = 0; - /** See {@class org.apache.beam.sdk.io.Compression}. */ + /** See {@link org.apache.beam.sdk.io.Compression}. */ @JsonProperty public String compressionType = "UNCOMPRESSED"; /** Runner specified to run the pipeline. */ diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java index e562a4067d22..779138834669 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/StatefulDoFnRunner.java @@ -49,8 +49,7 @@ /** * A customized {@link DoFnRunner} that handles late data dropping and garbage collection for * stateful {@link DoFn DoFns}. 
It registers a GC timer in {@link #processElement(WindowedValue)} - * and does cleanup in {@link #onTimer(String, String, BoundedWindow, Instant, Instant, TimeDomain, - * boolean)} + * and does cleanup in {@link #onTimer} * * @param the type of the {@link DoFn} (main) input elements * @param the type of the {@link DoFn} (main) output elements diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java index db1112f73885..762a2338c3e6 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java @@ -95,14 +95,14 @@ public void deleteTimer( timeDomain)); } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { throw new UnsupportedOperationException("Canceling of timer by ID is not yet supported."); } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. 
*/ @Deprecated @Override public void deleteTimer(TimerData timerData) { diff --git a/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java b/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java index c03799d09535..793959c5e693 100644 --- a/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java +++ b/runners/flink/1.19/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java @@ -52,7 +52,7 @@ public interface OutputWrapper extends Output { @Override default void emitWatermarkStatus(WatermarkStatus watermarkStatus) {} - /** In Flink 1.19 the {@code emitRecordAttributes} method was added. */ + /** In Flink 1.19 the {@code recordAttributes} method was added. */ @Override default void emitRecordAttributes(RecordAttributes recordAttributes) { throw new UnsupportedOperationException("emitRecordAttributes not implemented"); diff --git a/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java index dca2f3075aa0..14e0399ebace 100644 --- a/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java +++ b/runners/flink/1.20/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java @@ -1655,7 +1655,7 @@ void onFiredOrDeletedTimer(TimerData timer) { } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { @@ -1672,7 +1672,7 @@ public void deleteTimer( } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. 
*/ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ @Override @Deprecated public void deleteTimer(TimerData timer) { diff --git a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java index f83e719ed0b9..aa84818a5730 100644 --- a/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java +++ b/runners/flink/2.0/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java @@ -1655,7 +1655,7 @@ void onFiredOrDeletedTimer(TimerData timer) { } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { @@ -1672,7 +1672,7 @@ public void deleteTimer( } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ @Override @Deprecated public void deleteTimer(TimerData timer) { diff --git a/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java b/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java index a39af17766fc..5f9a1f1dd472 100644 --- a/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java +++ b/runners/flink/2.0/src/test/java/org/apache/beam/runners/flink/streaming/StreamSources.java @@ -52,7 +52,7 @@ public interface OutputWrapper extends Output { @Override default void emitWatermarkStatus(WatermarkStatus watermarkStatus) {} - /** In Flink 1.19 the {@code emitRecordAttributes} method was added. 
*/ + /** In Flink 1.19 the {@code recordAttributes} method was added. */ @Override default void emitRecordAttributes(RecordAttributes recordAttributes) { throw new UnsupportedOperationException("emitRecordAttributes not implemented"); diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java index 2bf0d40cd5f2..ed06b8f6b070 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java @@ -1658,7 +1658,7 @@ void onFiredOrDeletedTimer(TimerData timer) { } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. */ @Deprecated @Override public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) { @@ -1675,7 +1675,7 @@ public void deleteTimer( } } - /** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */ + /** @deprecated use {@link #deleteTimer(StateNamespace, String, String, TimeDomain)}. 
*/ @Override @Deprecated public void deleteTimer(TimerData timer) { diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java index ccc98fa87e6a..dffd10806afa 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowElementExecutionTracker.java @@ -100,14 +100,14 @@ private static class ElementExecution { /** Marker execution to represent when there is no element currently being processed. */ static final ElementExecution IDLE = new ElementExecution(); - /** Only empty for {@see IDLE}. */ + /** Only empty for {@link #IDLE}. */ final Optional step; ElementExecution(NameContext step) { this.step = Optional.of(step); } - /** Only used for {@see IDLE}. */ + /** Only used for {@link #IDLE}. */ private ElementExecution() { step = Optional.empty(); } @@ -155,8 +155,8 @@ private static class ReaderWriterState { * Journal of fragments of execution per element to count for attributing processing time. Each * time we transition up or down the stage fusion graph we add an execution fragment for the * currently processing element with an incremented snapshot version. Each snapshot version must - * have a representative value in the {@code executionJournal}, or {@see IDLE_EXECUTION} to - * represent completion of processing. + * have a representative value in the {@code executionJournal}, or {@link ElementExecution#IDLE} + * to represent completion of processing. 
*/ private final Journal executionJournal; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java index cd9a222b4878..3dc3293aa26c 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowExecutionContext.java @@ -104,8 +104,8 @@ public DataflowExecutionContext( * PCollectionView PCollectionViews}. * *

If side input source metadata is provided by the service in {@link SideInputInfo - * sideInputInfos}, we request a {@link SideInputReader} from the {@code executionContext} using - * that info. If no side input source metadata is provided but the DoFn expects side inputs, as a + * sideInputInfos}, we request a {@link SideInputReader} from the execution context using that + * info. If no side input source metadata is provided but the DoFn expects side inputs, as a * fallback, we request a {@link SideInputReader} based only on the expected views. * *

These cases are not disjoint: Whenever a {@link GroupAlsoByWindowFn} takes side inputs, diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java index 356781e11e53..0ab6a0537d9d 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricsToPerStepNamespaceMetricsConverter.java @@ -171,8 +171,8 @@ private static void addOutlierStatsToHistogram( /** * @param metricName The {@link MetricName} that represents this Histogram. - * @param value The histogram value. Currently we only support converting histograms that use - * {@code linear} or {@code exponential} buckets. + * @param inputHistogram The histogram value. Currently we only support converting histograms that + * use {@code linear} or {@code exponential} buckets. * @return If this conversion succeeds, a {@code MetricValue} that represents this histogram. * Otherwise returns an empty optional. */ diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java index 26b1dc55ead9..e159e7fbd2e6 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkUnitClient.java @@ -75,7 +75,7 @@ public interface WorkUnitClient { * Reports the worker messages to dataflow. 
We currently report autoscaling signals and * perworkermetrics with this path. * - * @param msg the WorkerMessages to report + * @param messages the WorkerMessages to report * @return a list of {@link WorkerMessageResponse} */ List reportWorkerMessage(List messages) throws IOException; diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java index 7dec8d1ed6ce..9ef87ef92ab9 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java @@ -58,8 +58,8 @@ * broken stream. * *

Subclasses should override {@link #newResponseHandler()} to implement a handler for physical - * stream connection. {@link #onNewStream()} to perform any work that must be done when a new stream - * is created, such as sending headers or retrying requests. + * stream connection. {@link #onFlushPending(boolean)} to perform any work that must be done when a + * new stream is created, such as sending headers or retrying requests. * *

{@link #trySend(RequestT)} and {@link #startStream()} should not be called when handling * responses; use {@link #executeSafely(Runnable)} instead. diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java index b027a6cac7b0..90a479c8f85b 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/ResettableThrowingStreamObserver.java @@ -28,13 +28,12 @@ import org.slf4j.Logger; /** - * Request observer that allows resetting its internal delegate using a {@link - * #streamObserverFactory}. + * Request observer that allows resetting its internal delegate. - * @implNote {@link StreamObserver}s generated by {@link #streamObserverFactory} are expected to be - * {@link ThreadSafe}. Has same methods declared in {@link StreamObserver}, but they throw - * {@link StreamClosedException} and {@link WindmillStreamShutdownException}, which much be - * handled by callers. + * @implNote {@link StreamObserver}s injected via {@link #reset(TerminatingStreamObserver)} are + * expected to be {@link ThreadSafe}. Has same methods declared in {@link StreamObserver}, but + * they throw {@link StreamClosedException} and {@link WindmillStreamShutdownException}, which + * must be handled by callers. */ @ThreadSafe @Internal @@ -49,9 +48,9 @@ final class ResettableThrowingStreamObserver { private boolean isPoisoned = false; /** - * Indicates that the current delegate is closed via {@link #poison() or {@link #onCompleted()}}. 
- * If not poisoned, a call to {@link #reset()} is required to perform future operations on the - * StreamObserver. + * Indicates that the current delegate is closed via {@link #poison()} or {@link #onCompleted()}. + * If not poisoned, a call to {@link #reset(TerminatingStreamObserver)} is required to perform + * future operations on the StreamObserver. */ @GuardedBy("this") private boolean isCurrentStreamClosed = true; diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java index 0e38abb1b78b..30a49b2968b3 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/ArtifactStagingService.java @@ -486,7 +486,6 @@ private Optional getLocal() { /** * Attempts to provide a reasonable filename for the artifact. * - * @param index a monotonically increasing index, which provides uniqueness * @param environment the environment id * @param artifact the artifact itself */ diff --git a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java index 111d937fcbf6..87551cfc03c3 100644 --- a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java +++ b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismExecutor.java @@ -110,7 +110,7 @@ void execute() throws IOException { /** * Execute the {@link ProcessBuilder} that starts the Prism service. Redirects output to the - * {@param outputStream}. + * {@code outputStream}. 
*/ void execute(OutputStream outputStream) throws IOException { execute(createProcessBuilder().redirectErrorStream(true)); @@ -127,7 +127,7 @@ void execute(OutputStream outputStream) throws IOException { /** * Execute the {@link ProcessBuilder} that starts the Prism service. Redirects output to the - * {@param file}. + * {@code file}. */ void execute(File file) throws IOException { execute( diff --git a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java index 7508e505725e..c4a43710e5e1 100644 --- a/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java +++ b/runners/prism/java/src/main/java/org/apache/beam/runners/prism/PrismPipelineResult.java @@ -32,7 +32,7 @@ class PrismPipelineResult implements PipelineResult { private final Runnable cleanup; /** - * Instantiate the {@link PipelineResult} from the {@param delegate} and a {@param cancel} to be + * Instantiate the {@link PipelineResult} from the {@code delegate} and a {@code cancel} to be * called when stopping the underlying executable Job management service. */ PrismPipelineResult(PipelineResult delegate, Runnable cancel) { diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java index e55eb2cc34c9..53b6968e1119 100644 --- a/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java +++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/runtime/ClassicBundleManager.java @@ -40,13 +40,11 @@ import org.slf4j.LoggerFactory; /** - * Implementation of BundleManager for non-portable mode. Keeps track of the async function - * completions. + * {@inheritDoc} Implementation of BundleManager for non-portable mode. Keeps track of the async + * function completions. * *

This class is not thread safe and the current implementation relies on the assumption that * messages are dispatched to BundleManager in a single threaded mode. - * - *

{@inheritDoc} */ @SuppressWarnings({ "nullness" // TODO(https://github.com/apache/beam/issues/20497) diff --git a/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java b/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java index 7ea277740b62..7b6d16cc6a68 100644 --- a/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java +++ b/runners/spark/3/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/helpers/EncoderHelpers.java @@ -176,10 +176,10 @@ public static Encoder encoderFor(Coder coder) { /** * Creates a Spark {@link Encoder} for {@link T} of {@link StructType} with fields {@code value}, - * {@code timestamp}, {@code windows} and {@code pane}. + * {@code timestamp}, {@code window} and {@code pane}. * * @param value {@link Encoder} to encode field `{@code value}`. - * @param window {@link Encoder} to encode individual windows in field `{@code windows}` + * @param window {@link Encoder} to encode individual windows in field `{@code window}` */ public static Encoder> windowedValueEncoder( Encoder value, Encoder window) { diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java index 03f7885f7e9f..1488bed3231c 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java @@ -88,7 +88,7 @@ public static JavaRDD>>> groupByKeyOnly( /** * Spark-level group by key operation that keeps original Beam {@link KV} pairs unchanged. 
* - * @returns {@link JavaPairRDD} where the first value in the pair is the serialized key, and the + * @return {@link JavaPairRDD} where the first value in the pair is the serialized key, and the * second is an iterable of the {@link KV} pairs with that key. */ static JavaPairRDD>>> groupByKeyPair( diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java index 73caa7284e98..00dd2e367ce8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystem.java @@ -46,9 +46,9 @@ public abstract class FileSystem { *

Implementation should handle the following ambiguities of a user-provided spec: * *

    - *
  1. {@code spec} could be a glob or a uri. {@link #match} should be able to tell and choose + *
  2. {@code specs} could be a glob or a uri. {@link #match} should be able to tell and choose * efficient implementations. - *
  3. The user-provided {@code spec} might refer to files or directories. It is common that + *
  4. The user-provided {@code specs} might refer to files or directories. It is common that * users that wish to indicate a directory will omit the trailing {@code /}, such as in a * spec of {@code "/tmp/dir"}. The {@link FileSystem} should be able to recognize a * directory with the trailing {@code /} omitted, but should always return a correct {@link diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java index 7e2940a2c35b..155df53c6c2e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileSystems.java @@ -100,9 +100,9 @@ public static boolean hasGlobWildcard(String spec) { *

    Implementation handles the following ambiguities of a user-provided spec: * *

      - *
    1. {@code spec} could be a glob or a uri. {@link #match} should be able to tell and choose + *
    2. {@code specs} could be a glob or a uri. {@link #match} should be able to tell and choose * efficient implementations. - *
    3. The user-provided {@code spec} might refer to files or directories. It is common that + *
    4. The user-provided {@code specs} might refer to files or directories. It is common that * users that wish to indicate a directory will omit the trailing path delimiter, such as * {@code "/tmp/dir"} in Linux. The {@link FileSystem} should be able to recognize a * directory with the trailing path delimiter omitted, but should always return a correct diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java index af19f8f33e5d..104cbdeefb15 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/logicaltypes/UnknownLogicalType.java @@ -26,7 +26,7 @@ * *

      Java transforms and JVM runners should take care when processing these types as they may have * a particular semantic meaning in the context that created them. For example, consider an - * enumerated type backed by a primitive {@class FieldType.INT8}. A Java transform can clearly pass + * enumerated type backed by a primitive {@code FieldType.INT8}. A Java transform can clearly pass * through this value and pass it back to a context that understands it, but that transform should * not blindly perform arithmetic on this type. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java index 107f61d7b675..41beb93a5cbe 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java @@ -1049,7 +1049,7 @@ public interface MultiOutputReceiver { * RestrictionTracker.HasProgress} implementation within the {@link RestrictionTracker} is an * inaccurate representation of known work. * - *

      It is up to each splittable {@DoFn} to convert between their natural representation of + *

      It is up to each splittable {@link DoFn} to convert between their natural representation of * outstanding work and this representation. For example: * *

        diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java index 159f92cd5e87..8bd2b23befe3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Flatten.java @@ -92,7 +92,7 @@ public static Iterables iterables() { * {@code other} and then applying {@link #pCollections()}, but has the advantage that it can be * more easily used inline. * - *

        Both {@cpde PCollections} must have equal {@link WindowFn}s. The output elements of {@code + *

        Both {@code PCollection}s must have equal {@link WindowFn}s. The output elements of {@code * Flatten} are in the same windows and have the same timestamps as their corresponding input * elements. The output {@code PCollection} will have the same {@link WindowFn} as both inputs. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java index d812d299a836..69667929dad3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/JsonToRow.java @@ -142,16 +142,16 @@ private ObjectMapper objectMapper() { * *

        Then access the parsed results via, {@link ParseResult#getResults()} * - *

        {@Code PCollection personRows = results.getResults()} + *

        {@code PCollection personRows = results.getResults()} * *

        And access the failed to parse results via, {@link ParseResult#getFailedToParseLines()} * - *

        {@Code PCollection errorsLines = results.getFailedToParseLines()} + *

        {@code PCollection errorsLines = results.getFailedToParseLines()} * *

        This will produce a Row with Schema {@link JsonToRowWithErrFn#ERROR_ROW_SCHEMA} * *

        To access the reason for the failure you will need to first enable extended error reporting. - * {@link JsonToRowWithErrFn#withExtendedErrorInfo()} {@Code ParseResult results = + * {@link JsonToRowWithErrFn#withExtendedErrorInfo()} {@code ParseResult results = * jsonPersons.apply(JsonToRow.withExceptionReporting(PERSON_SCHEMA).withExtendedErrorInfo()); } * *

        This will provide access to the reason for the Parse failure. The call to {@link diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java index 35f71d690102..8f254642f081 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java @@ -741,7 +741,7 @@ public abstract static class ElementParameter extends Parameter { } /** - * Descriptor for a (@link Parameter} of type {@link DoFn.Element} where the type does not match + * Descriptor for a {@link Parameter} of type {@link DoFn.Element} where the type does not match * the DoFn's input type. This implies that the input must have a schema that is compatible. */ @AutoValue diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java index 513772e8ec35..0398f4d63ae3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RowStringInterpolator.java @@ -27,8 +27,6 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.beam.sdk.schemas.Schema; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.values.Row; import org.apache.beam.sdk.values.ValueInSingleWindow; import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.MoreObjects; @@ -60,9 +58,9 @@ * // output --> "unified batch and streaming!" * } * - *

        Additionally, {@link #interpolate(Row, BoundedWindow, PaneInfo, Instant)} can be used in - * streaming scenarios to substitute windowing metadata into the template String. To make use of - * this, use the relevant placeholder: + *

        Additionally, {@link #interpolate(ValueInSingleWindow)} can be used in streaming scenarios to + * substitute windowing metadata into the template String. To make use of this, use the relevant + * placeholder: * *

          *
        • $WINDOW: the window's string representation diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java index 4268c6c70671..c2b12255e42d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/TransformUpgrader.java @@ -466,8 +466,7 @@ public static byte[] toByteArray(Object object) { * A utility method that converts a byte array obtained by invoking {@link #toByteArray(Object)} * back to a Java object. * - * @param bytes a {@code byte} array generated by invoking the {@link #toByteArray(Object)} - * method. + * @param bytes an array of bytes generated by invoking the {@link #toByteArray(Object)} method. * @return re-generated object. */ public static Object fromByteArray(byte[] bytes) throws InvalidClassException { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java index 8510d8dd0c12..9126ab77a88c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/construction/UnknownCoderWrapper.java @@ -24,7 +24,7 @@ import org.apache.beam.sdk.coders.CoderException; /** - * Represents a {@coder Coder} that is not defined in Java SDK, for example, a coder that is + * Represents a {@code Coder} that is not defined in Java SDK, for example, a coder that is * available in an external SDK that cannot be fully interpretted in the Java SDK. 
*/ public class UnknownCoderWrapper extends AtomicCoder { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java index 1f3ec0f427b4..9925c3653674 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java @@ -146,7 +146,8 @@ private void assertSerializationClassLoader( } /** - * a sample class to test framework serialization, {@see SerializableUtilsTest#customClassLoader}. + * a sample class to test framework serialization, {@link + * SerializableUtilsTest#customClassLoader}. */ public static class Foo implements Serializable {} } diff --git a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java index c3c3ccfd3266..ae658d93955b 100644 --- a/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java +++ b/sdks/java/expansion-service/src/main/java/org/apache/beam/sdk/expansion/service/ExpansionService.java @@ -440,7 +440,7 @@ static Row decodeConfigObjectRow(SchemaApi.Schema schema, ByteString payload) { * *

          If no Schema is registered, {@link ConfigT} must have a zero-argument constructor and * setters corresponding to each field in the row encoded by {@code payload}. Note {@link ConfigT} - * may have additional setters not represented in the {@ocde payload} schema. + * may have additional setters not represented in the {@code payload} schema. * *

          Exposed for testing only. No backwards compatibility guarantees. */ diff --git a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java index 396fde452987..e3f01dd85295 100644 --- a/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java +++ b/sdks/java/extensions/google-cloud-platform-core/src/main/java/org/apache/beam/sdk/extensions/gcp/util/GcsUtil.java @@ -118,7 +118,7 @@ public long fileSize(GcsPath path) throws IOException { return delegate.fileSize(path); } - /** @deprecated use {@link #getBlob(GcsPath)}. */ + /** @deprecated use {@link #getBlob(GcsPath, BlobGetOption...)}. */ @Deprecated public StorageObject getObject(GcsPath gcsPath) throws IOException { return delegate.getObject(gcsPath); diff --git a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java index 058c01f019db..832dc5e427c6 100644 --- a/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java +++ b/sdks/java/extensions/ordered/src/main/java/org/apache/beam/sdk/extensions/ordered/OrderedProcessingHandler.java @@ -38,7 +38,7 @@ * *

          There are two types of processing - when the sequence numbers are contiguous per key and these * sequences per keys are independent of each other, and when there is a global sequence shared by - * all keys. In case of the global sequence processing the custom handler must extend from {@see + * all keys. In case of the global sequence processing the custom handler must extend from {@link * OrderedProcessingGlobalSequenceHandler}. * * @param type of events to be processed diff --git a/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java b/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java index cdf222fadf56..a16f32c99199 100644 --- a/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java +++ b/sdks/java/extensions/ordered/src/test/java/org/apache/beam/sdk/extensions/ordered/OrderedEventProcessorTestBase.java @@ -115,7 +115,7 @@ public void map( /** * The majority of the tests use this method. Testing is done in the global window. * - * @throws @UnknownKeyFor @NonNull @Initialized CannotProvideCoderException + * @throws CannotProvideCoderException */ protected void doTest( Event[] events, diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java index 808449de5d54..858dbcd5bf76 100644 --- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java +++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/catalog/CatalogManager.java @@ -31,8 +31,8 @@ *

          Implementations should have a way of determining which catalog is currently active, and * produce it when {@link #currentCatalog()} is invoked. * - *

          When {@link #registerTableProvider(String, TableProvider)} is called, the provider should - * become available for all catalogs. + *

          When {@link #registerTableProvider(TableProvider)} is called, the provider should become + * available for all catalogs. */ @Internal public interface CatalogManager { diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java index ed717b7f6e94..ca780440863c 100644 --- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java +++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java @@ -93,7 +93,7 @@ public static class RowsBuilder { * ) * } * - * @args pairs of column type and column names. + * @param args pairs of column type and column names. */ public static RowsBuilder of(final Object... args) { Schema beamSQLSchema = TestTableUtils.buildBeamSqlSchema(args); diff --git a/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java b/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java index 97dabfe93cab..f42653754696 100644 --- a/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java +++ b/sdks/java/extensions/timeseries/src/main/java/org/apache/beam/sdk/extensions/timeseries/FillGaps.java @@ -83,7 +83,7 @@ * *

          By default, the latest element from the previous bucket is propagated into missing buckets. The user can override * this using the {@link #withMergeFunction} method. Several built-in merge functions are provided for - - * {@link #keepLatest()} (the default), {@link #keepEarliest()}, an {@link #keepNull()}. + * {@link #keepLatest()} (the default), {@link #keepEarliest()}, and {@code keepNull()}. * *

          Sometimes elements need to be modified before being propagated into a missing bucket. For example, consider the * following element type containing a timestamp: diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java index fac83485d22e..528864d9e90a 100644 --- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java +++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/SplittableSplitAndSizeRestrictionsDoFnRunner.java @@ -74,7 +74,7 @@ * Double>>} * * - *

          In addition to this, it passes {@Code OutputReceiver} to the DoFn GetRestriction + *

          In addition to this, it passes {@code OutputReceiver} to the DoFn GetRestriction * method. */ @Internal diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java index 2de4a47ebaec..cca6a452a0e4 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java @@ -490,8 +490,8 @@ public Read withArrivalTimeWatermarkPolicy() { /** * Specifies the {@code WatermarkPolicyFactory} as ArrivalTimeWatermarkPolicyFactory. * - *

          {@param watermarkIdleDurationThreshold} Denotes the duration for which the watermark can - * be idle. + *

          {@code watermarkIdleDurationThreshold} Denotes the duration for which the watermark can be + * idle. */ public Read withArrivalTimeWatermarkPolicy(Duration watermarkIdleDurationThreshold) { return toBuilder() diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java index b073e275be38..662366228124 100644 --- a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/ConfigWrapper.java @@ -29,7 +29,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** Class for building {@link PluginConfig} object of the specific class {@param }. */ +/** Class for building {@link PluginConfig} object of the specific class {@code }. */ public class ConfigWrapper { private static final Logger LOG = LoggerFactory.getLogger(ConfigWrapper.class); diff --git a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java index ced112010193..bc54d154fb09 100644 --- a/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java +++ b/sdks/java/io/cdap/src/main/java/org/apache/beam/sdk/io/cdap/PluginConfigInstantiationUtils.java @@ -41,9 +41,9 @@ public class PluginConfigInstantiationUtils { private static final String MACRO_FIELDS_FIELD_NAME = "macroFields"; /** - * Method for instantiating {@link PluginConfig} object of specific class {@param configClass}. + * Method for instantiating {@link PluginConfig} object of specific class {@code configClass}. * After instantiating, it will go over all {@link Field}s with the {@link Name} annotation and - * set the appropriate parameter values from the {@param params} map for them. + * set the appropriate parameter values from the {@code params} map for them. 
* * @param params map of config fields, where key is the name of the field, value must be String or * boxed primitive diff --git a/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java index d4955ac43b40..bd666dd7cf23 100644 --- a/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java +++ b/sdks/java/io/cdap/src/test/java/org/apache/beam/sdk/io/cdap/CdapIOIT.java @@ -72,7 +72,7 @@ /** * IO Integration test for {@link org.apache.beam.sdk.io.cdap.CdapIO}. * - *

          {@see https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests} for + *

          See <a href="https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests">the Beam I/O transform integration testing guide</a> for * more details. */ @RunWith(JUnit4.class) diff --git a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java index ba4ac2769949..0a58a480dba4 100644 --- a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java +++ b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java @@ -533,7 +533,7 @@ public ConnectionConfiguration withPathPrefix(String pathPrefix) { /** * If Elasticsearch authentication is enabled, provide an API key. Be aware that you can only - * use one of {@Code withApiToken()}, {@code withBearerToken()} and {@code withDefaultHeaders} + * use one of {@code withApiToken()}, {@code withBearerToken()} and {@code withDefaultHeaders} * at the same time, as they (potentially) use the same header. * * @param apiKey the API key used to authenticate to Elasticsearch @@ -549,7 +549,7 @@ public ConnectionConfiguration withApiKey(String apiKey) { /** * If Elasticsearch authentication is enabled, provide a bearer token. Be aware that you can - * only use one of {@Code withApiToken()}, {@code withBearerToken()} and {@code + * only use one of {@code withApiToken()}, {@code withBearerToken()} and {@code * withDefaultHeaders} at the same time, as they (potentially) use the same header. * * @param bearerToken the bearer token used to authenticate to Elasticsearch @@ -1793,7 +1793,7 @@ public DocToBulk withDocVersionType(String docVersionType) { * Providing this hint means there is no need for setting {@link * DocToBulk#withConnectionConfiguration}. This can also be very useful for testing purposes. * - *

          Note: if the value of @param backendVersion differs from the version the destination + *

          Note: if the value of {@code backendVersion} differs from the version the destination * cluster is running, behavior is undefined and likely to yield errors. * * @param backendVersion the major version number of the version of Elasticsearch being run in @@ -2486,7 +2486,7 @@ public BulkIO withIgnoreVersionConflicts(boolean ignoreVersionConflicts) { /** * Provide a set of textual error types which can be contained in Bulk API response - * items[].error.type field. Any element in @param allowableResponseErrorTypes will suppress + * items[].error.type field. Any element in {@code allowableResponseErrorTypes} will suppress * errors of the same type in Bulk responses. * *

          See also @@ -2543,7 +2543,7 @@ public BulkIO withUseStatefulBatches(boolean useStatefulBatches) { * batches are maintained per-key-per-window. BE AWARE that low values for @param * maxParallelRequests, in particular if the input data has a finite number of windows, can * reduce parallelism greatly. Because data will be temporarily globally windowed as part of - * writing data to Elasticsearch, if @param maxParallelRequests is set to 1, there will only + * writing data to Elasticsearch, if {@code maxParallelRequests} is set to 1, there will only * ever be 1 request in flight. Having only a single request in flight can be beneficial for * ensuring an Elasticsearch cluster is not overwhelmed by parallel requests, but may not work * for all use cases. If this number is less than the number of maximum workers in your @@ -2566,7 +2566,7 @@ public BulkIO withMaxParallelRequestsPerWindow(int maxParallelRequests) { * batches are maintained per-key-per-window. BE AWARE that low values for @param * maxParallelRequests, in particular if the input data has a finite number of windows, can * reduce parallelism greatly. Because data will be temporarily globally windowed as part of - * writing data to Elasticsearch, if @param maxParallelRequests is set to 1, there will only + * writing data to Elasticsearch, if {@code maxParallelRequests} is set to 1, there will only * ever be 1 request in flight. Having only a single request in flight can be beneficial for * ensuring an Elasticsearch cluster is not overwhelmed by parallel requests, but may not work * for all use cases. 
If this number is less than the number of maximum workers in your diff --git a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java index d01e0051c7b8..54c885f5415f 100644 --- a/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java +++ b/sdks/java/io/file-schema-transform/src/test/java/org/apache/beam/sdk/io/fileschematransform/FileReadSchemaTransformFormatProviderTest.java @@ -57,7 +57,7 @@ public abstract class FileReadSchemaTransformFormatProviderTest { - /** Returns the format of the {@linke FileReadSchemaTransformFormatProviderTest} subclass. */ + /** Returns the format of the {@link FileReadSchemaTransformFormatProviderTest} subclass. */ protected abstract String getFormat(); /** Given a Beam Schema, returns the relevant source's String schema representation. */ diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java index 35751e2758e1..4cf8f9c73bab 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/AvroGenericRecordToStorageApiProto.java @@ -267,7 +267,7 @@ public static TableSchema protoTableSchemaFromAvroSchema(Schema schema) { } /** - * Forwards {@param changeSequenceNum} to {@link #messageFromGenericRecord(Descriptor, + * Forwards {@code changeSequenceNum} to {@link #messageFromGenericRecord(Descriptor, * GenericRecord, String, String)} via {@link Long#toHexString}. 
*/ public static DynamicMessage messageFromGenericRecord( diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java index a4d20707304e..36e1d77b67bf 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BeamRowToStorageApiProto.java @@ -146,7 +146,7 @@ private static ByteString toProtoByteString(Object o) { .build(); /** - * Forwards (@param changeSequenceNum) to {@link #messageFromBeamRow(Descriptor, Row, String, + * Forwards ({@code changeSequenceNum}) to {@link #messageFromBeamRow(Descriptor, Row, String, * String)} via {@link Long#toHexString}. */ public static DynamicMessage messageFromBeamRow( diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java index 2c6704fb6545..74032c36438e 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtils.java @@ -449,7 +449,7 @@ public static TimestampPicos fromString(String timestampString) { *

          Supports both standard and legacy SQL types. * * @param schema Schema of the type returned - * @param nestedFields Nested fields for the given type (eg. RECORD type) + * @param options Options for schema conversion * @return Corresponding Beam {@link FieldType} */ private static FieldType fromTableFieldSchemaType( diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java index 18905d149b9d..c7001c622fc9 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/RowMutationInformation.java @@ -57,10 +57,10 @@ public enum MutationType { public abstract String getChangeSequenceNumber(); /** - * Instantiate {@link RowMutationInformation} with {@link MutationType} and the {@param + * Instantiate {@link RowMutationInformation} with {@link MutationType} and the {@code * sequenceNumber}. @deprecated - instantiates {@link RowMutationInformation} via {@link - * #of(MutationType, String)} forwarding the {@param sequenceNumber} value using {@link - * Long#toHexString(long)}. {@param sequenceNumber} values {@code < 0} will throw an error. + * #of(MutationType, String)} forwarding the {@code sequenceNumber} value using {@link + * Long#toHexString(long)}. {@code sequenceNumber} values {@code < 0} will throw an error. 
*/ @Deprecated public static RowMutationInformation of(MutationType mutationType, long sequenceNumber) { @@ -70,11 +70,11 @@ public static RowMutationInformation of(MutationType mutationType, long sequence } /** - * Instantiate {@link RowMutationInformation} with {@link MutationType} and the {@param + * Instantiate {@link RowMutationInformation} with {@link MutationType} and the {@code * changeSequenceNumber}, which sets the BigQuery API {@code _CHANGE_SEQUENCE_NUMBER} pseudo * column, enabling custom user-supplied ordering of {@link RowMutation}s. * - *

          Requirements for the {@param changeSequenceNumber}: + *

          Requirements for the {@code changeSequenceNumber}: * *

            *
          • fixed format {@code String} in hexadecimal format @@ -87,12 +87,12 @@ public static RowMutationInformation of(MutationType mutationType, long sequence * FFFFFFFFFFFFFFFF/FFFFFFFFFFFFFFFF/FFFFFFFFFFFFFFFF/FFFFFFFFFFFFFFFF} *
          * - *

          Below are some {@param changeSequenceNumber} scenarios: + *

          Below are some {@code changeSequenceNumber} scenarios: * * * - * - * + * + * * * * diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java index ab5ae80065a4..a2bed6ce3a0a 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/TableRowToStorageApiProto.java @@ -818,7 +818,7 @@ public static DynamicMessage messageFromMap( } /** - * Forwards {@param changeSequenceNum} to {@link #messageFromTableRow(SchemaInformation, + * Forwards {@code changeSequenceNum} to {@link #messageFromTableRow(SchemaInformation, * Descriptor, TableRow, boolean, boolean, TableRow, String, String)} via {@link * Long#toHexString}. */ diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java index bd01604643e1..3e2206d59c01 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubClient.java @@ -101,7 +101,7 @@ protected static Long parseTimestampAsMsSinceEpoch(String timestamp) { /** * Return the timestamp (in ms since unix epoch) to use for a Pubsub message with {@code - * timestampAttribute} and {@code attriutes}. + * timestampAttribute} and {@code attributes}. * *

          The message attributes must contain {@code timestampAttribute}, and the value of that * attribute will be taken as the timestamp. diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java index 58849fd3f3bc..da3ee89de0d8 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubRowToMessage.java @@ -120,7 +120,7 @@ InputSchemaFactory inputSchemaFactory() { /** * As a convenience method, generates {@link InputSchemaFactory} for expected {@link Schema} for * {@link Row} input into {@link PubsubRowToMessage}. The {@link Field} for {@link - * #getPayloadKeyName()} is excluded for null {@param payloadFieldType}. See {@link + * #getPayloadKeyName()} is excluded for null {@code payloadFieldType}. See {@link * InputSchemaFactory#buildSchema(Field...)} for details on how to add additional fields. */ InputSchemaFactory inputSchemaFactory(@Nullable FieldType payloadFieldType) { @@ -308,7 +308,7 @@ private SchemaReflection(Schema schema) { this.schema = schema; } - /** Returns true of all {@param fieldMatchers} {@link FieldMatcher#match(Schema)}. */ + /** Returns true of all {@code fieldMatchers} {@link FieldMatcher#match(Schema)}. */ boolean matchesAll(FieldMatcher... fieldMatchers) { for (FieldMatcher fieldMatcher : fieldMatchers) { if (!fieldMatcher.match(schema)) { @@ -318,7 +318,7 @@ boolean matchesAll(FieldMatcher... fieldMatchers) { return true; } - /** Returns true of any {@param fieldMatchers} {@link FieldMatcher#match(Schema)}. */ + /** Returns true of any {@code fieldMatchers} {@link FieldMatcher#match(Schema)}. */ boolean matchesAny(FieldMatcher... 
fieldMatchers) { for (FieldMatcher fieldMatcher : fieldMatchers) { if (fieldMatcher.match(schema)) { @@ -558,7 +558,7 @@ static Builder builder() { /** * Builds a {@link Schema} from {@link #getAttributesField()} and {@link #getTimestampField()} - * and {@param additionalFields}. Users are encouraged to use the {@link #removeFields(Schema, + * and {@code additionalFields}. Users are encouraged to use the {@link #removeFields(Schema, * String...)} method to customize the resulting {@link Schema}. */ Schema buildSchema(Field... additionalFields) { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java index b9a554d54ade..bc91e9f381d4 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSource.java @@ -581,7 +581,7 @@ void ackBatch(List ackIds) throws IOException { /** * BLOCKING NACK (ie request deadline extension of 0) receipt of messages from Pubsub with the - * given {@code ockIds}. Does not retain {@code ackIds}. + * given {@code ackIds}. Does not retain {@code ackIds}. 
*/ public void nackBatch(long nowMsSinceEpoch, List ackIds) throws IOException { pubsubClient.get().modifyAckDeadline(subscription, ackIds, 0); diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java index 11220b69b7bf..6085431396db 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchema.java @@ -45,7 +45,7 @@ public class ReadSpannerSchema extends DoFn { private transient SpannerAccessor spannerAccessor; /** - * Constructor for creating an instance of the ReadSpannerSchema class. If no {@param + * Constructor for creating an instance of the ReadSpannerSchema class. If no {@code * allowedTableNames} is passed, every single table is allowed. * * @param config The SpannerConfig object that contains the configuration for accessing the diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java index 69e89e74a38b..e81d8aef473c 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/action/QueryChangeStreamAction.java @@ -104,9 +104,9 @@ public class QueryChangeStreamAction { * @param dataChangeRecordAction action class to process {@link DataChangeRecord}s * @param heartbeatRecordAction action class to process {@link HeartbeatRecord}s * @param childPartitionsRecordAction action class to process {@link 
ChildPartitionsRecord}s - * @param PartitionStartRecordAction action class to process {@link PartitionStartRecord}s - * @param PartitionEndRecordAction action class to process {@link PartitionEndRecord}s - * @param PartitionEventRecordAction action class to process {@link PartitionEventRecord}s + * @param partitionStartRecordAction action class to process {@link PartitionStartRecord}s + * @param partitionEndRecordAction action class to process {@link PartitionEndRecord}s + * @param partitionEventRecordAction action class to process {@link PartitionEventRecord}s * @param metrics metrics gathering class * @param isMutableChangeStream whether the change stream is mutable or not */ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java index a5d6ac68ce66..5041b7c8f2f6 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOWriteTest.java @@ -1933,7 +1933,7 @@ public int hashCode() { } } - /** Coder for @link{PartitionedGlobalWindow}. */ + /** Coder for {@link PartitionedGlobalWindow}. 
*/ private static class PartitionedGlobalWindowCoder extends AtomicCoder { @Override public void encode(PartitionedGlobalWindow window, OutputStream outStream) throws IOException { diff --git a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java index 6d66cee21109..673463cf5a75 100644 --- a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java +++ b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseRowMutationsCoder.java @@ -92,7 +92,7 @@ public List> getCoderArguments() { * which is asserted equal in this coder 2. Canonical encoding is maintained regardless of object * machine or time context * - * @throws @UnknownKeyFor@NonNull@Initialized NonDeterministicException + * @throws NonDeterministicException */ @Override public void verifyDeterministic() {} diff --git a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java index 5f53d3243001..2c712b39752e 100644 --- a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java +++ b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java @@ -416,7 +416,7 @@ public void testWriteWithWriteResults() throws Exception { } /** - * @return {@link JdbcIO.Write} transform that writes to {@param tableName} Postgres table and + * @return {@link JdbcIO.Write} transform that writes to {@code tableName} Postgres table and * returns all fields of modified rows. 
*/ private static JdbcIO.Write> getJdbcWriteWithReturning(String tableName) { diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java index a05abba06e75..a0fb595b3aab 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/ReadFromKafkaDoFn.java @@ -116,7 +116,7 @@ * and will only return a non-null {@link org.apache.beam.sdk.transforms.splittabledofn.SplitResult} * for a checkpoint. To the extent possible in the SDK, this reduces the risk of overwriting * committed offsets when {@code enable.auto.commit} is set and prevents concurrent use of - * per-{@TopicPartition} cached {@link Consumer} resources. + * per-{@link TopicPartition} cached {@link Consumer} resources. * *

          TODO(https://github.com/apache/beam/issues/20280): Add support for initial splitting. * diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java index b1133eadb1cb..b7426c336d63 100644 --- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java +++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOIT.java @@ -128,7 +128,7 @@ /** * IO Integration test for {@link org.apache.beam.sdk.io.kafka.KafkaIO}. * - *

          {@see https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests} for + *

          See https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests for * more details. * *

          NOTE: This test sets retention policy of the messages so that all messages are retained in the diff --git a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java index 669cc3c43f2e..73008a015c41 100644 --- a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java +++ b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/ExchangeTestPlan.java @@ -52,7 +52,7 @@ public ExchangeTestPlan(RabbitMqIO.Read read, int maxRecordsRead, int numRecords * @param read Read semantics to use for a test * @param maxRecordsRead Maximum messages to be processed by Beam within a test * @param numRecordsToPublish Number of messages that will be published to the exchange as part of - * a test. Note that this will frequently be the same value as {@code numRecordsRead} in which + * a test. Note that this will frequently be the same value as {@code maxRecordsRead} in which * case it's simpler to use {@link #ExchangeTestPlan(RabbitMqIO.Read, int)}, but when testing * topic exchanges or exchanges where not all messages will be routed to the queue being read * from, these numbers will differ. diff --git a/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java b/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java index b8057d879686..db5ae34d7376 100644 --- a/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java +++ b/sdks/java/io/singlestore/src/main/java/org/apache/beam/sdk/io/singlestore/SingleStoreIO.java @@ -321,8 +321,8 @@ public DataSourceConfiguration withDatabase(String database) { *

          NOTE - The "user" and "password" properties can be add via {@link #withUsername(String)}, * {@link #withPassword(String)}, so they do not need to be included here. * - *

          Full list of supported properties can be found here {@link ...} + *

          Full list of supported properties can be found here ... */ public DataSourceConfiguration withConnectionProperties(String connectionProperties) { checkNotNull(connectionProperties, "connectionProperties can not be null"); diff --git a/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java b/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java index 32258934d0d9..77e20ef49b94 100644 --- a/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java +++ b/sdks/java/io/sparkreceiver/3/src/test/java/org/apache/beam/sdk/io/sparkreceiver/SparkReceiverIOIT.java @@ -77,8 +77,9 @@ /** * IO Integration test for {@link org.apache.beam.sdk.io.sparkreceiver.SparkReceiverIO}. * - *

          {@see https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests} for - * more details. + *

          See https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests + * for more details. * *

          NOTE: This test sets retention policy of the messages so that all messages are retained in the * topic so that we could read them back after writing. From 0c618a0cf4cf76df3bc4b268687850aed57f6d5b Mon Sep 17 00:00:00 2001 From: Radek Stankiewicz Date: Thu, 5 Mar 2026 10:27:56 +0100 Subject: [PATCH 2/7] Fix JdbcUtil after merge --- .../src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java index 6cc3e6f3b9a6..6cfece500ad6 100644 --- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java +++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcUtil.java @@ -455,12 +455,10 @@ private static Calendar withTimestampAndTimezone(DateTime dateTime) { return calendar; } - /** - * Returns a {@code JdbcReadPartitionsHelper} instance associated with the given {@param type}. - */ + /** Returns a {@code JdbcReadPartitionsHelper} instance associated with the given {@code type}. */ static @Nullable JdbcReadWithPartitionsHelper getPartitionsHelper(TypeDescriptor type) { // This cast is unchecked, thus this is a small type-checking risk. We just need - // to make sure that all preset helpers in `JdbcUtil.PRESET_HELPERS` are matched + // to make sure that all preset helpers in {@code PRESET_HELPERS} are matched // in type from their Key and their Value. 
return (JdbcReadWithPartitionsHelper) PRESET_HELPERS.get(type.getRawType()); } From 0b5f27d28a7850e4c5eb0abeed7a361ef5bf030b Mon Sep 17 00:00:00 2001 From: Radek Stankiewicz Date: Thu, 5 Mar 2026 12:33:05 +0100 Subject: [PATCH 3/7] spotless --- .../src/main/java/org/apache/beam/sdk/transforms/MapValues.java | 1 - 1 file changed, 1 deletion(-) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java index 072fd86b2527..ef0ecdca518b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java @@ -130,7 +130,6 @@ public SimpleMapWithFailures, KV, FailureT> exceptio * .via(word -> 1 / word.length) // Could throw ArithmeticException * .exceptionsVia( * new InferableFunction>, String>() { - * @Override * public String apply(ExceptionElement> input) { * return input.exception().getMessage(); * } From 96ce332b2c0771187106200336d7558b3a130f6b Mon Sep 17 00:00:00 2001 From: Radek Stankiewicz Date: Thu, 5 Mar 2026 15:54:26 +0100 Subject: [PATCH 4/7] changes --- .../src/main/java/org/apache/beam/sdk/transforms/MapValues.java | 1 + 1 file changed, 1 insertion(+) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java index ef0ecdca518b..072fd86b2527 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapValues.java @@ -130,6 +130,7 @@ public SimpleMapWithFailures, KV, FailureT> exceptio * .via(word -> 1 / word.length) // Could throw ArithmeticException * .exceptionsVia( * new InferableFunction>, String>() { + * @Override * public String apply(ExceptionElement> input) { * return input.exception().getMessage(); * } From 5019232f1a876f32b3d2bd36a862ffd619f6aa9a 
Mon Sep 17 00:00:00 2001 From: Radek Stankiewicz Date: Thu, 5 Mar 2026 17:57:12 +0100 Subject: [PATCH 5/7] leave ignore block --- .../main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy | 3 --- 1 file changed, 3 deletions(-) diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index c8b174fc9ded..972fb7a6063c 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -1547,9 +1547,6 @@ class BeamModulePlugin implements Plugin { "ExtendsAutoValue", "InlineMeSuggester", "InvalidBlockTag", - "InvalidInlineTag", - "InvalidLink", - "InvalidParam", "InvalidThrows", "JavaTimeDefaultTimeZone", "JavaUtilDate", From 6e0c429d226dc74bd1fcd04570a7c6289b101169 Mon Sep 17 00:00:00 2001 From: Radek Stankiewicz Date: Fri, 6 Mar 2026 18:32:55 +0100 Subject: [PATCH 6/7] Fix InvalidLink and restore InvalidBlockTag to disabledChecks --- .../org/apache/beam/gradle/BeamModulePlugin.groovy | 1 + .../windmill/client/AbstractWindmillStream.java | 2 +- .../apache/beam/sdk/metrics/MetricsEnvironment.java | 11 ++++++----- .../java/org/apache/beam/sdk/transforms/MapKeys.java | 1 - .../sdk/io/aws2/common/AsyncBatchWriteHandler.java | 4 ++-- 5 files changed, 10 insertions(+), 9 deletions(-) diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index 972fb7a6063c..5fed458ac82f 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -1531,6 +1531,7 @@ class BeamModulePlugin implements Plugin { } def disabledChecks = [ // TODO(https://github.com/apache/beam/issues/20955): Enable errorprone checks + "InvalidBlockTag", "AutoValueImmutableFields", 
"AutoValueImmutableFields", "AutoValueSubclassLeaked", diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java index 9ef87ef92ab9..7d74b868056f 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/client/AbstractWindmillStream.java @@ -61,7 +61,7 @@ * stream connection. {@link #onFlushPending(boolean)} to perform any work that must be done when a * new stream is created, such as sending headers or retrying requests. * - *

          {@link #trySend(RequestT)} and {@link #startStream()} should not be called when handling + *

          {@link #trySend(Object)} and {@link #startStream()} should not be called when handling * responses; use {@link #executeSafely(Runnable)} instead. * *

          Synchronization on this is used to synchronize the gRpc stream state and internal data diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java index 3421bb4afc85..2a88dd0025df 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java @@ -164,12 +164,13 @@ public static class MetricsContainerHolder implements MetricsEnvironmentState { } } - /** - * Set the {@link MetricsContainer} for the associated {@link MetricsEnvironment}. - * - * @return The previous container for the associated {@link MetricsEnvironment}. - */ + /** Set the {@link MetricsContainer} for the associated {@link MetricsEnvironment}. */ public interface MetricsEnvironmentState { + /** + * Activates the given container. + * + * @return The previous container for the associated {@link MetricsEnvironment}. 
+ */ @Nullable MetricsContainer activate(@Nullable MetricsContainer metricsContainer); } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java index cb6ee84c9aaf..b93e6e288c73 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/MapKeys.java @@ -129,7 +129,6 @@ public SimpleMapWithFailures, KV, FailureT> exceptio * .via(word -> 1 / word.length) // Could throw ArithmeticException * .exceptionsVia( * new InferableFunction>, String>() { - * @Override * public String apply(ExceptionElement> input) { * return input.exception().getMessage(); * } diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java index 95a65fe11658..74319151e3d4 100644 --- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java +++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/common/AsyncBatchWriteHandler.java @@ -286,8 +286,8 @@ protected List failedRecords(List records, List errors) { * already been attempted by the AWS SDK in that case. * * - * The next call of {@link #checkForAsyncFailure()}, {@link #batchWrite(String, List< RecT >)}} or - * {@link #waitForCompletion()} will check for the last async failure and throw it. Afterwards the + * The next call of {@link #checkForAsyncFailure()}, {@link #batchWrite(String, List)} or {@link + * #waitForCompletion()} will check for the last async failure and throw it. Afterwards the * failure state is reset. 
*/ private class RetryHandler implements BiConsumer, Throwable> { From 0dbd2e51043512c6ba2198673d1357de8b8fd88b Mon Sep 17 00:00:00 2001 From: Radek Stankiewicz Date: Fri, 6 Mar 2026 19:06:55 +0100 Subject: [PATCH 7/7] Remove duplicate entry --- .../main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy | 1 - 1 file changed, 1 deletion(-) diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index 5fed458ac82f..972fb7a6063c 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -1531,7 +1531,6 @@ class BeamModulePlugin implements Plugin { } def disabledChecks = [ // TODO(https://github.com/apache/beam/issues/20955): Enable errorprone checks - "InvalidBlockTag", "AutoValueImmutableFields", "AutoValueImmutableFields", "AutoValueSubclassLeaked",

          Record #1: {@param changeSequenceNumber}Record #2: {@param changeSequenceNumber}Record #1: {@code changeSequenceNumber}Record #2: {@code changeSequenceNumber}BigQuery API compares as