This is an automated email from the ASF dual-hosted git repository.
fcsaky pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-connector-kafka.git
The following commit(s) were added to refs/heads/main by this push:
new 741673ef [FLINK-39138] Update `flink-connector-parent` version to 2.0.0
741673ef is described below
commit 741673ef0350143dd2366fb4a0466c640a4012c9
Author: Thomas Cooper <[email protected]>
AuthorDate: Tue Mar 17 18:31:34 2026 +0000
[FLINK-39138] Update `flink-connector-parent` version to 2.0.0
---
.../SingleClusterTopicMetadataService.java | 8 +-
.../kafka/lineage/DefaultKafkaDatasetFacet.java | 16 +-
.../lineage/DefaultKafkaDatasetIdentifier.java | 16 +-
.../kafka/lineage/DefaultTypeDatasetFacet.java | 16 +-
.../connector/kafka/lineage/KafkaDatasetFacet.java | 16 +-
.../kafka/lineage/KafkaDatasetFacetProvider.java | 16 +-
.../kafka/lineage/KafkaDatasetIdentifier.java | 16 +-
.../lineage/KafkaDatasetIdentifierProvider.java | 16 +-
.../connector/kafka/lineage/TypeDatasetFacet.java | 16 +-
.../kafka/lineage/TypeDatasetFacetProvider.java | 16 +-
.../kafka/sink/ExactlyOnceKafkaWriter.java | 6 +
.../kafka/sink/KafkaRecordSerializationSchema.java | 4 +-
.../kafka/sink/TransactionNamingStrategy.java | 1 +
.../kafka/sink/TwoPhaseCommittingStatefulSink.java | 19 ++
.../kafka/sink/internal/BackchannelImpl.java | 2 +
.../kafka/sink/internal/ProducerPoolImpl.java | 4 +
.../TransactionAbortStrategyContextImpl.java | 1 +
.../source/enumerator/KafkaSourceEnumState.java | 1 +
.../enumerator/KafkaSourceEnumStateSerializer.java | 2 +
.../source/enumerator/KafkaSourceEnumerator.java | 6 +-
.../kafka/table/KafkaConnectorOptionsUtil.java | 3 +-
.../connectors/kafka/table/TableDataTypeUtils.java | 19 ++
.../DynamicKafkaSourceEnumeratorTest.java | 4 +-
.../connector/kafka/lineage/LineageUtilTest.java | 16 +-
.../flink/connector/kafka/sink/KafkaSinkTest.java | 16 +-
.../sink/KafkaWriterFaultToleranceITCase.java | 1 +
.../DynamicKafkaRecordSerializationSchemaTest.java | 19 ++
.../kafka/table/TableDataTypeUtilsTest.java | 19 ++
flink-python/pom.xml | 6 -
pom.xml | 4 +-
tools/maven/checkstyle.xml | 209 +++++----------------
31 files changed, 255 insertions(+), 259 deletions(-)
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/dynamic/metadata/SingleClusterTopicMetadataService.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/dynamic/metadata/SingleClusterTopicMetadataService.java
index a81c5586..def2dce6 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/dynamic/metadata/SingleClusterTopicMetadataService.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/dynamic/metadata/SingleClusterTopicMetadataService.java
@@ -95,8 +95,12 @@ public class SingleClusterTopicMetadataService implements
KafkaMetadataService {
@Override
public Map<String, KafkaStream> describeStreams(Collection<String>
streamIds) {
try {
- return getAdminClient().describeTopics(new
ArrayList<>(streamIds)).allTopicNames().get()
- .keySet().stream()
+ return getAdminClient()
+ .describeTopics(new ArrayList<>(streamIds))
+ .allTopicNames()
+ .get()
+ .keySet()
+ .stream()
.collect(Collectors.toMap(topic -> topic,
this::createKafkaStream));
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException("Fetching all streams failed", e);
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetFacet.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetFacet.java
index ee94443b..9d126e4d 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetFacet.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetFacet.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetIdentifier.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetIdentifier.java
index be6108a8..09a4cd39 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetIdentifier.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultKafkaDatasetIdentifier.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultTypeDatasetFacet.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultTypeDatasetFacet.java
index 116f4139..271b05e7 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultTypeDatasetFacet.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/DefaultTypeDatasetFacet.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacet.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacet.java
index 16ee4fb3..42ce28b5 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacet.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacet.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacetProvider.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacetProvider.java
index 00367642..205e0f5f 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacetProvider.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetFacetProvider.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifier.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifier.java
index 801c01d0..6d2db7b2 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifier.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifier.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifierProvider.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifierProvider.java
index 918b1f5e..cfa8849f 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifierProvider.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/KafkaDatasetIdentifierProvider.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacet.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacet.java
index 74fdcbce..afa4adbd 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacet.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacet.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacetProvider.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacetProvider.java
index f19b50d6..4282ebb8 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacetProvider.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/lineage/TypeDatasetFacetProvider.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/ExactlyOnceKafkaWriter.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/ExactlyOnceKafkaWriter.java
index eb84977d..64eb1a35 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/ExactlyOnceKafkaWriter.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/ExactlyOnceKafkaWriter.java
@@ -68,15 +68,18 @@ import static
org.apache.flink.util.Preconditions.checkNotNull;
*/
class ExactlyOnceKafkaWriter<IN> extends KafkaWriter<IN> {
private static final Logger LOG =
LoggerFactory.getLogger(ExactlyOnceKafkaWriter.class);
+
/**
* Prefix for the transactional id. Must be unique across all sinks
writing to the same broker.
*/
private final String transactionalIdPrefix;
+
/**
* Strategy to abort lingering transactions from previous executions
during writer
* initialization.
*/
private final TransactionAbortStrategyImpl transactionAbortStrategy;
+
/** Strategy to name transactions. */
private final TransactionNamingStrategyImpl transactionNamingStrategy;
@@ -89,17 +92,20 @@ class ExactlyOnceKafkaWriter<IN> extends KafkaWriter<IN> {
* to committer).
*/
private final ProducerPool producerPool;
+
/**
* Backchannel used to communicate committed transactions from the
committer to this writer.
* Establishing the channel happens during recovery. Thus, it is only safe
to poll in checkpoint
* related methods.
*/
private final ReadableBackchannel<TransactionFinished> backchannel;
+
/** The context used to name transactions. */
private final TransactionNamingStrategyContextImpl namingContext;
private final int totalNumberOfOwnedSubtasks;
private final int[] ownedSubtaskIds;
+
/** Lazily created admin client for {@link TransactionAbortStrategyImpl}.
*/
private AdminClient adminClient;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/KafkaRecordSerializationSchema.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/KafkaRecordSerializationSchema.java
index f56a7da5..74e5e5fa 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/KafkaRecordSerializationSchema.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/KafkaRecordSerializationSchema.java
@@ -77,7 +77,9 @@ public interface KafkaRecordSerializationSchema<T> extends
Serializable {
*/
int getParallelInstanceId();
- /** @return number of parallel KafkaSink tasks. */
+ /**
+ * @return number of parallel KafkaSink tasks.
+ */
int getNumberOfParallelInstances();
/**
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TransactionNamingStrategy.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TransactionNamingStrategy.java
index 6ebfb1fe..607cc697 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TransactionNamingStrategy.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TransactionNamingStrategy.java
@@ -69,6 +69,7 @@ public enum TransactionNamingStrategy {
* leaks of internal classes in signatures.
*/
private final TransactionNamingStrategyImpl impl;
+
/**
* The set of supported abort strategies for this naming strategy. Some
naming strategies may
* not support all abort strategies.
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TwoPhaseCommittingStatefulSink.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TwoPhaseCommittingStatefulSink.java
index 3d2c1989..2db07e49 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TwoPhaseCommittingStatefulSink.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/TwoPhaseCommittingStatefulSink.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package org.apache.flink.connector.kafka.sink;
import org.apache.flink.annotation.Internal;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/BackchannelImpl.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/BackchannelImpl.java
index a6996000..c8581ddf 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/BackchannelImpl.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/BackchannelImpl.java
@@ -42,6 +42,7 @@ public final class BackchannelImpl<T> {
* readable and writable channels are closed.
*/
private final Runnable closeAction;
+
/**
* The messages to be sent from the writer to the committer. It's a thread
safe deque in case
* committer and writer are not chained.
@@ -50,6 +51,7 @@ public final class BackchannelImpl<T> {
/** The readable backchannel. */
private volatile ReadableBackchannel<T> readableBackchannel;
+
/** The writable backchannel. */
private volatile WritableBackchannel<T> writableBackchannel;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/ProducerPoolImpl.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/ProducerPoolImpl.java
index 3789ea09..31b0381a 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/ProducerPoolImpl.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/ProducerPoolImpl.java
@@ -81,19 +81,23 @@ public class ProducerPoolImpl implements ProducerPool {
* is empty.
*/
private final Properties kafkaProducerConfig;
+
/** Callback to allow the writer to init metrics. */
private final Consumer<FlinkKafkaInternalProducer<byte[], byte[]>>
producerInit;
+
/**
* The pool of producers that are available for reuse. This pool is used
to avoid creating new
* producers for every transaction.
*/
private final Deque<FlinkKafkaInternalProducer<byte[], byte[]>>
producerPool =
new ArrayDeque<>();
+
/**
* The map of ongoing transactions (id -> producer/CheckpointTransaction).
This is used to keep
* track of the transactions that are ongoing and the respective producers
are not in the pool.
*/
private final Map<String, ProducerEntry> producerByTransactionalId = new
TreeMap<>();
+
/**
* A secondary tracking structure to quickly find transactions coming from
an earlier
* checkpoints.
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/TransactionAbortStrategyContextImpl.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/TransactionAbortStrategyContextImpl.java
index c88485ce..6dbed4c3 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/TransactionAbortStrategyContextImpl.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/sink/internal/TransactionAbortStrategyContextImpl.java
@@ -47,6 +47,7 @@ public class TransactionAbortStrategyContextImpl implements
TransactionAbortStra
private final Set<String> prefixesToAbort;
private final long startCheckpointId;
private final TransactionAborter transactionAborter;
+
/** Transactional ids that mustn't be aborted. */
private final Set<String> precommittedTransactionIds;
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumState.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumState.java
index 649bd584..20472ac3 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumState.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumState.java
@@ -31,6 +31,7 @@ import java.util.stream.Collectors;
public class KafkaSourceEnumState {
/** Splits with status: ASSIGNED or UNASSIGNED_INITIAL. */
private final Set<SplitAndAssignmentStatus> splits;
+
/**
* this flag will be marked as true if initial partitions are discovered
after enumerator
* starts.
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumStateSerializer.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumStateSerializer.java
index a79db37c..c42194b7 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumStateSerializer.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumStateSerializer.java
@@ -52,8 +52,10 @@ public class KafkaSourceEnumStateSerializer
* assigned splits.
*/
private static final int VERSION_0 = 0;
+
/** state of VERSION_1 only contains assignedPartitions, which is a list
of assigned splits. */
private static final int VERSION_1 = 1;
+
/**
* state of VERSION_2 contains initialDiscoveryFinished and partitions
with different assignment
* status.
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumerator.java
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumerator.java
index da73fd86..4eb67181 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumerator.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/connector/kafka/source/enumerator/KafkaSourceEnumerator.java
@@ -766,7 +766,8 @@ public class KafkaSourceEnumerator
.collect(
Collectors.toMap(
partition -> partition, __
-> offsetSpec)))
- .entrySet().stream()
+ .entrySet()
+ .stream()
.collect(
Collectors.toMap(
Map.Entry::getKey, entry ->
entry.getValue().offset()));
@@ -793,7 +794,8 @@ public class KafkaSourceEnumerator
entry ->
OffsetSpec.forTimestamp(
entry.getValue()))))
- .entrySet().stream()
+ .entrySet()
+ .stream()
// OffsetAndTimestamp cannot be initialized with a
negative offset, which is
// possible if the timestamp does not correspond to an
offset and the topic
// partition is empty
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/KafkaConnectorOptionsUtil.java
b/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/KafkaConnectorOptionsUtil.java
index f9ac8fb5..ecaa3091 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/KafkaConnectorOptionsUtil.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/KafkaConnectorOptionsUtil.java
@@ -99,6 +99,7 @@ class KafkaConnectorOptionsUtil {
protected static final String DEBEZIUM_AVRO_CONFLUENT =
"debezium-avro-confluent";
private static final List<String> SCHEMA_REGISTRY_FORMATS =
Arrays.asList(AVRO_CONFLUENT, DEBEZIUM_AVRO_CONFLUENT);
+
//
--------------------------------------------------------------------------------------------
// Validation
//
--------------------------------------------------------------------------------------------
@@ -393,7 +394,7 @@ class KafkaConnectorOptionsUtil {
case SINK_PARTITIONER_VALUE_DEFAULT:
case SINK_PARTITIONER_VALUE_ROUND_ROBIN:
return Optional.empty();
-            // Default fallback to full class name of the partitioner.
+                // Default fallback to full class name of the partitioner.
default:
return Optional.of(
initializePartitioner(partitioner,
classLoader));
diff --git
a/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtils.java
b/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtils.java
index 6a598c6c..c7ccebf9 100644
---
a/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtils.java
+++
b/flink-connector-kafka/src/main/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtils.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package org.apache.flink.streaming.connectors.kafka.table;
import org.apache.flink.table.types.DataType;
diff --git
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/dynamic/source/enumerator/DynamicKafkaSourceEnumeratorTest.java
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/dynamic/source/enumerator/DynamicKafkaSourceEnumeratorTest.java
index 21eb4399..40e0dd00 100644
---
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/dynamic/source/enumerator/DynamicKafkaSourceEnumeratorTest.java
+++
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/dynamic/source/enumerator/DynamicKafkaSourceEnumeratorTest.java
@@ -434,7 +434,9 @@ public class DynamicKafkaSourceEnumeratorTest {
DynamicKafkaSourceEnumState stateBeforeSplitAssignment =
enumerator.snapshotState(-1);
assertThat(
-                stateBeforeSplitAssignment.getClusterEnumeratorStates().values()
+ stateBeforeSplitAssignment
+ .getClusterEnumeratorStates()
+ .values()
.stream()
.map(subState ->
subState.assignedSplits().stream())
.count())
diff --git
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/lineage/LineageUtilTest.java
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/lineage/LineageUtilTest.java
index dd48ef62..b3e1cc02 100644
---
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/lineage/LineageUtilTest.java
+++
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/lineage/LineageUtilTest.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.lineage;
diff --git
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaSinkTest.java
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaSinkTest.java
index dfb60c56..e2a6f29b 100644
---
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaSinkTest.java
+++
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaSinkTest.java
@@ -1,18 +1,20 @@
/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
*/
package org.apache.flink.connector.kafka.sink;
diff --git
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaWriterFaultToleranceITCase.java
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaWriterFaultToleranceITCase.java
index 08ec2c62..5c0fec46 100644
---
a/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaWriterFaultToleranceITCase.java
+++
b/flink-connector-kafka/src/test/java/org/apache/flink/connector/kafka/sink/KafkaWriterFaultToleranceITCase.java
@@ -37,6 +37,7 @@ public class KafkaWriterFaultToleranceITCase extends KafkaWriterTestBase {
private static final String INIT_KAFKA_RETRIES = "0";
private static final String INIT_KAFKA_REQUEST_TIMEOUT_MS = "1000";
private static final String INIT_KAFKA_MAX_BLOCK_MS = "1000";
+
/**
* The delivery timeout has to be greater than the request timeout as the latter is part of the
* former and this is enforced by a compile time check.
diff --git
a/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/DynamicKafkaRecordSerializationSchemaTest.java
b/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/DynamicKafkaRecordSerializationSchemaTest.java
index 5a1a0e29..5a083927 100644
---
a/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/DynamicKafkaRecordSerializationSchemaTest.java
+++
b/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/DynamicKafkaRecordSerializationSchemaTest.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package org.apache.flink.streaming.connectors.kafka.table;
import org.apache.flink.api.common.serialization.SerializationSchema;
diff --git
a/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtilsTest.java
b/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtilsTest.java
index b3d8ff6c..00c61b8c 100644
---
a/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtilsTest.java
+++
b/flink-connector-kafka/src/test/java/org/apache/flink/streaming/connectors/kafka/table/TableDataTypeUtilsTest.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package org.apache.flink.streaming.connectors.kafka.table;
import org.apache.flink.table.types.DataType;
diff --git a/flink-python/pom.xml b/flink-python/pom.xml
index c418c310..dbfe468e 100644
--- a/flink-python/pom.xml
+++ b/flink-python/pom.xml
@@ -143,12 +143,6 @@ under the License.
<outputDirectory>${project.build.directory}/test-dependencies</outputDirectory>
</configuration>
</execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <executions>
<execution>
<id>copy-dependencies</id>
<phase>package</phase>
diff --git a/pom.xml b/pom.xml
index 35178dfc..06ac70c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,7 +22,7 @@ under the License.
<parent>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-parent</artifactId>
- <version>1.0.0</version>
+ <version>2.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@@ -557,7 +557,7 @@ under the License.
<plugin>
<!-- activate API compatibility checks -->
- <groupId>io.github.zentol.japicmp</groupId>
+ <groupId>com.github.siom79.japicmp</groupId>
<artifactId>japicmp-maven-plugin</artifactId>
</plugin>
diff --git a/tools/maven/checkstyle.xml b/tools/maven/checkstyle.xml
index 157888a5..ff6c1393 100644
--- a/tools/maven/checkstyle.xml
+++ b/tools/maven/checkstyle.xml
@@ -31,11 +31,6 @@ This file is based on the checkstyle file of Apache Beam.
<module name="Checker">
- <module name="NewlineAtEndOfFile">
- <!-- windows can use \r\n vs \n, so enforce the most used one
ie UNIx style -->
- <property name="lineSeparator" value="lf"/>
- </module>
-
<module name="RegexpSingleline">
<!-- Checks that TODOs don't have stuff in parenthesis, e.g.,
username. -->
<property name="format" value="((//.*)|(\*.*))TODO\("/>
@@ -55,18 +50,10 @@ This file is based on the checkstyle file of Apache Beam.
<property name="severity" value="error"/>
</module>
- <!-- Prevent *Tests.java as tools may not pick them up -->
- <!--<module name="RegexpOnFilename">-->
- <!--<property name="fileNamePattern" value=".*Tests\.java$" />-->
- <!--</module>-->
-
<module name="SuppressionFilter">
<property name="file" value="${checkstyle.suppressions.file}"
default="suppressions.xml"/>
</module>
- <!-- Check that every module has a package-info.java -->
- <!--<module name="JavadocPackage"/>-->
-
<!--
FLINK CUSTOM CHECKS
@@ -74,7 +61,7 @@ This file is based on the checkstyle file of Apache Beam.
-->
<module name="FileLength">
- <property name="max" value="3000"/>
+ <property name="max" value="3100"/>
</module>
<!-- All Java AST specific tests live under TreeWalker module. -->
@@ -169,6 +156,11 @@ This file is based on the checkstyle file of Apache Beam.
<property name="illegalPattern" value="true"/>
<property name="message" value="Use
com.fasterxml.jackson instead of jettison."/>
</module>
+ <module name="Regexp">
+ <property name="format"
value="org\.testcontainers\.shaded"/>
+ <property name="illegalPattern" value="true"/>
+ <property name="message" value="Use utilities from
appropriate library instead of org.testcontainers."/>
+ </module>
<!-- Enforce Java-style array declarations -->
<module name="ArrayTypeStyle"/>
@@ -184,38 +176,27 @@ This file is based on the checkstyle file of Apache Beam.
-->
- <module name="RedundantImport">
- <!-- Checks for redundant import statements. -->
- <property name="severity" value="error"/>
- <message key="import.redundancy"
- value="Redundant import {0}."/>
- </module>
-
- <module name="ImportOrder">
- <!-- Checks for out of order import statements. -->
- <property name="severity" value="error"/>
- <property name="groups"
-
value="org.apache.flink,org.apache.flink.shaded,*,javax,java,scala"/>
- <property name="separated" value="true"/>
- <property name="sortStaticImportsAlphabetically"
value="true"/>
- <property name="option" value="bottom"/>
- <property name="tokens" value="STATIC_IMPORT, IMPORT"/>
- <message key="import.ordering"
- value="Import {0} appears after other
imports that it should precede"/>
- </module>
-
<module name="AvoidStarImport">
<property name="severity" value="error"/>
</module>
<module name="IllegalImport">
<property name="illegalPkgs"
- value="autovalue.shaded, avro.shaded,
com.google.api.client.repackaged, com.google"/>
+ value="org.mockito, org.powermock"/>
+ <message key="import.illegal" value="{0}; Mocking is
discouraged. Please refer to the coding guidelines:
https://flink.apache.org/how-to-contribute/code-style-and-quality-common/#avoid-mockito---use-reusable-test-implementations."/>
+ </module>
+ <module name="IllegalImport">
+ <property name="illegalPkgs"
+ value="autovalue.shaded, avro.shaded,
com.google.api.client.repackaged, com.google.appengine.repackaged"/>
</module>
<module name="IllegalImport">
<property name="illegalPkgs"
value="org.codehaus.jackson"/>
<message key="import.illegal" value="{0}; Use
flink-shaded-jackson instead."/>
</module>
+ <module name="IllegalImport">
+ <property name="illegalPkgs"
value="com.jayway.jsonpath"/>
+ <message key="import.illegal" value="{0}; Use
flink-shaded-jsonpath instead."/>
+ </module>
<module name="IllegalImport">
<property name="illegalPkgs" value="org.objectweb.asm"/>
<message key="import.illegal" value="{0}; Use
flink-shaded-asm instead."/>
@@ -225,13 +206,13 @@ This file is based on the checkstyle file of Apache Beam.
<message key="import.illegal" value="{0}; Use
flink-shaded-netty instead."/>
</module>
<module name="IllegalImport">
- <property name="illegalPkgs" value="com.google"/>
- <message key="import.illegal" value="{0}; Don't use
guava in prod code."/>
+ <property name="illegalPkgs" value="com.google.common"/>
+ <message key="import.illegal" value="{0}; Use
flink-shaded-guava instead."/>
</module>
<module name="RedundantModifier">
<!-- Checks for redundant modifiers on various symbol
definitions.
- See:
http://checkstyle.sourceforge.net/config_modifier.html#RedundantModifier
+ See:
https://checkstyle.sourceforge.io/checks/modifier/redundantmodifier.html#RedundantModifier
We exclude METHOD_DEF to allow final methods in final
classes to make them more future-proof.
-->
@@ -243,9 +224,6 @@ This file is based on the checkstyle file of Apache Beam.
IllegalImport cannot blacklist classes, and
c.g.api.client.util is used for some shaded
code and some useful code. So we need to fall back to
Regexp.
-->
- <!--<module name="RegexpSinglelineJava">-->
- <!--<property name="format"
value="com\.google\.api\.client\.util\.(ByteStreams|Charsets|Collections2|Joiner|Lists|Maps|Objects|Preconditions|Sets|Strings|Throwables)"/>-->
- <!--</module>-->
<!--
Require static importing from Preconditions.
@@ -255,11 +233,27 @@ This file is based on the checkstyle file of Apache Beam.
<property name="message" value="Static import functions
from Guava Preconditions"/>
</module>
- <module name="UnusedImports">
- <property name="severity" value="error"/>
- <property name="processJavadoc" value="true"/>
- <message key="import.unused"
- value="Unused import: {0}."/>
+ <!--
+ The Nullable & Nonnull annotations check.
+ -->
+ <module name="RegexpSinglelineJava">
+ <property name="format" value="^import
org.jetbrains.annotations.Nullable;$"/>
+ <property name="message" value="Use import
javax.annotation.Nullable"/>
+ </module>
+
+ <module name="RegexpSinglelineJava">
+ <property name="format" value="^import
org.jetbrains.annotations.NotNull;$"/>
+ <property name="message" value="Use import
javax.annotation.Nonnull"/>
+ </module>
+
+ <module name="RegexpSinglelineJava">
+ <property name="format"
value="^\s*@org.jetbrains.annotations.Nullable\s*$"/>
+ <property name="message" value="Use import
javax.annotation.Nullable"/>
+ </module>
+
+ <module name="RegexpSinglelineJava">
+ <property name="format"
value="^\s*@org.jetbrains.annotations.Nonnull\s*$"/>
+ <property name="message" value="Use import
javax.annotation.Nonnull"/>
</module>
<!--
@@ -269,18 +263,12 @@ This file is based on the checkstyle file of Apache Beam.
-->
<!-- Checks for Javadoc comments. -->
- <!-- See http://checkstyle.sf.net/config_javadoc.html -->
+ <!-- See
https://checkstyle.sourceforge.io/checks/javadoc/javadocmethod.html -->
<module name="JavadocMethod">
- <property name="scope" value="protected"/>
+ <property name="accessModifiers" value="protected"/>
<property name="severity" value="error"/>
- <property name="allowMissingJavadoc" value="true"/>
<property name="allowMissingParamTags" value="true"/>
<property name="allowMissingReturnTag" value="true"/>
- <property name="allowMissingThrowsTags" value="true"/>
- <property name="allowThrowsTagsForSubclasses"
value="true"/>
- <property name="allowUndeclaredRTE" value="true"/>
- <!-- This check sometimes failed for with "Unable to
get class information for @throws tag" for custom exceptions -->
- <property name="suppressLoadErrors" value="true"/>
</module>
<!-- Check that paragraph tags are used correctly in Javadoc.
-->
@@ -384,45 +372,12 @@ This file is based on the checkstyle file of Apache Beam.
<property name="severity" value="error"/>
</module>
- <!-- Type parameters must be either one of the four blessed
letters
- T, K, V, W, X or else be capital-case terminated with a T,
- such as MyGenericParameterT -->
- <!--<module name="ClassTypeParameterName">-->
- <!--<property name="format"
value="^(((T|K|V|W|X)[0-9]*)|([A-Z][a-z][a-zA-Z]*T))$"/>-->
- <!--<property name="severity" value="error"/>-->
- <!--</module>-->
-
- <!--<module name="MethodTypeParameterName">-->
- <!--<property name="format"
value="^(((T|K|V|W|X)[0-9]*)|([A-Z][a-z][a-zA-Z]*T))$"/>-->
- <!--<property name="severity" value="error"/>-->
- <!--</module>-->
-
- <!--<module name="InterfaceTypeParameterName">-->
- <!--<property name="format"
value="^(((T|K|V|W|X)[0-9]*)|([A-Z][a-z][a-zA-Z]*T))$"/>-->
- <!--<property name="severity" value="error"/>-->
- <!--</module>-->
-
<!--
LENGTH and CODING CHECKS
-->
- <!--<module name="LineLength">-->
- <!--<!– Checks if a line is too long. –>-->
- <!--<property name="max" value="100"/>-->
- <!--<property name="severity" value="error"/>-->
-
- <!--<!–-->
- <!--The default ignore pattern exempts the following
elements:-->
- <!-- - import statements-->
- <!-- - long URLs inside comments-->
- <!--–>-->
-
- <!--<property name="ignorePattern"-->
- <!--value="^(package .*;\s*)|(import .*;\s*)|( *\*
.*https?://.*)$"/>-->
- <!--</module>-->
-
<!-- Checks for braces around if and else blocks -->
<module name="NeedBraces">
<property name="severity" value="error"/>
@@ -459,21 +414,6 @@ This file is based on the checkstyle file of Apache Beam.
<property name="ignoreComments" value="true"/>
</module>
- <!--
-
- MODIFIERS CHECKS
-
- -->
-
- <module name="ModifierOrder">
- <!-- Warn if modifier order is inconsistent with JLS3
8.1.1, 8.3.1, and
- 8.4.3. The prescribed order is:
- public, protected, private, abstract, static,
final, transient, volatile,
- synchronized, native, strictfp
- -->
- <property name="severity" value="error"/>
- </module>
-
<!--
@@ -492,71 +432,6 @@ This file is based on the checkstyle file of Apache Beam.
INTERFACE_DEF, ENUM_DEF, STATIC_INIT, INSTANCE_INIT, METHOD_DEF,
CTOR_DEF"/>
</module>
-
- <module name="WhitespaceAround">
- <!-- Checks that various tokens are surrounded by
whitespace.
- This includes most binary operators and
keywords followed
- by regular or curly braces.
- -->
- <property name="tokens" value="ASSIGN, BAND,
BAND_ASSIGN, BOR,
- BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN,
- EQUAL, GE, GT, LAMBDA, LAND, LE, LITERAL_CATCH, LITERAL_DO,
LITERAL_ELSE,
- LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
- LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS,
- MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION,
- SL, SL_ASSIGN, SR_ASSIGN, STAR, STAR_ASSIGN, TYPE_EXTENSION_AND"/>
- <property name="severity" value="error"/>
- </module>
-
- <module name="WhitespaceAfter">
- <!-- Checks that commas, semicolons and typecasts are
followed by
- whitespace.
- -->
- <property name="tokens" value="COMMA, SEMI, TYPECAST"/>
- </module>
-
- <module name="NoWhitespaceAfter">
- <!-- Checks that there is no whitespace after various
unary operators.
- Linebreaks are allowed.
- -->
- <property name="tokens" value="BNOT, DEC, DOT, INC,
LNOT, UNARY_MINUS,
- UNARY_PLUS"/>
- <property name="allowLineBreaks" value="true"/>
- <property name="severity" value="error"/>
- </module>
-
- <module name="NoWhitespaceBefore">
- <!-- Checks that there is no whitespace before various
unary operators.
- Linebreaks are allowed.
- -->
- <property name="tokens" value="SEMI, DOT, POST_DEC,
POST_INC"/>
- <property name="allowLineBreaks" value="true"/>
- <property name="severity" value="error"/>
- </module>
-
- <!--<module name="OperatorWrap">-->
- <!--<!– Checks that operators like + and ? appear at
newlines rather than-->
- <!--at the end of the previous line.-->
- <!--–>-->
- <!--<property name="option" value="NL"/>-->
- <!--<property name="tokens" value="BAND, BOR, BSR, BXOR, DIV,
EQUAL,-->
- <!--GE, GT, LAND, LE, LITERAL_INSTANCEOF, LOR, LT, MINUS,
MOD,-->
- <!--NOT_EQUAL, PLUS, QUESTION, SL, SR, STAR "/>-->
- <!--</module>-->
-
- <module name="OperatorWrap">
- <!-- Checks that assignment operators are at the end of
the line. -->
- <property name="option" value="eol"/>
- <property name="tokens" value="ASSIGN"/>
- </module>
-
- <module name="ParenPad">
- <!-- Checks that there is no whitespace before close
parens or after
- open parens.
- -->
- <property name="severity" value="error"/>
- </module>
-
</module>
</module>