[FLINK-8126] [build] Fix and update checkstyle
Update to the latest Checkstyle version and fix the errors it did not
previously detect.

This closes apache#5044.
greghogan authored and zentol committed Nov 22, 2017
1 parent 7c07d6d commit c6879cd
Showing 31 changed files with 31 additions and 59 deletions.
@@ -33,12 +33,10 @@
 import org.apache.flink.util.Preconditions;

 import org.apache.commons.lang3.time.StopWatch;
-
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
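The same fix repeats across the import hunks in this commit: the upgraded Checkstyle flags blank lines that split the non-Flink third-party imports into separate groups. A minimal sketch of the layout the import checks appear to expect, using a hypothetical file with import names taken from the hunk above:

	import org.apache.flink.util.Preconditions;

	import org.apache.commons.lang3.time.StopWatch;
	import org.apache.hadoop.fs.Path;
	import org.slf4j.Logger;

	import java.io.IOException;

Flink imports form the first group, all other third-party imports a single second group, and java/javax imports the last group, with a blank line between groups.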
@@ -42,12 +42,10 @@
 import org.apache.flink.util.Preconditions;

 import org.apache.commons.lang3.time.StopWatch;
-
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -52,7 +52,6 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
-
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Assume;
@@ -138,7 +138,7 @@ public FlinkKafkaProducer08(String topicId, KeyedSerializationSchema<IN> seriali
  *
  * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
  * producing to multiple topics. Use
- * {@link FlinkKafkaProducer08(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
+ * {@link #FlinkKafkaProducer08(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
  */
 @Deprecated
 public FlinkKafkaProducer08(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
@@ -155,7 +155,7 @@ public FlinkKafkaProducer08(String topicId, SerializationSchema<IN> serializatio
  *
  * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
  * producing to multiple topics. Use
- * {@link FlinkKafkaProducer08(String, KeyedSerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
+ * {@link #FlinkKafkaProducer08(String, KeyedSerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
  */
 @Deprecated
 public FlinkKafkaProducer08(String topicId, KeyedSerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
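The only change in this hunk (and in the matching FlinkKafkaProducer09 hunks below) is the added '#'. In javadoc, {@link Type} references a class, while {@link #member(...)} references a constructor or method of the current class; without the '#', the javadoc tool tries to resolve the parenthesized text as a type name and the link does not resolve. A minimal sketch with a hypothetical class:

	public class MyProducer {

		/**
		 * @deprecated Does not correctly handle partitioning across topics.
		 *     Use {@link #MyProducer(String, int)} instead. Without the
		 *     leading '#', javadoc would look for a type of that name and
		 *     the link would be broken.
		 */
		@Deprecated
		public MyProducer(String topic) {
			this(topic, 1);
		}

		// hypothetical replacement constructor
		public MyProducer(String topic, int partitions) {
		}
	}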
@@ -25,7 +25,6 @@
 import kafka.javaapi.TopicMetadata;
 import kafka.javaapi.TopicMetadataRequest;
 import kafka.javaapi.consumer.SimpleConsumer;
-
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.Node;
 import org.slf4j.Logger;
@@ -27,7 +27,6 @@
 import org.apache.flink.util.NetUtils;

 import org.apache.kafka.clients.consumer.ConsumerConfig;
-
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Matchers;
@@ -142,7 +142,7 @@ public FlinkKafkaProducer09(String topicId, KeyedSerializationSchema<IN> seriali
  *
  * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
  * producing to multiple topics. Use
- * {@link FlinkKafkaProducer09(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
+ * {@link #FlinkKafkaProducer09(String, SerializationSchema, Properties, FlinkKafkaPartitioner)} instead.
  */
 @Deprecated
 public FlinkKafkaProducer09(String topicId, SerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
@@ -160,7 +160,7 @@ public FlinkKafkaProducer09(String topicId, SerializationSchema<IN> serializatio
  *
  * @deprecated This is a deprecated constructor that does not correctly handle partitioning when
  * producing to multiple topics. Use
- * {@link FlinkKafkaProducer09(String, org.apache.flink.streaming.util.serialization.KeyedDeserializationSchema, Properties, FlinkKafkaPartitioner)} instead.
+ * {@link #FlinkKafkaProducer09(String, org.apache.flink.streaming.util.serialization.KeyedDeserializationSchema, Properties, FlinkKafkaPartitioner)} instead.
  */
 @Deprecated
 public FlinkKafkaProducer09(String topicId, KeyedSerializationSchema<IN> serializationSchema, Properties producerConfig, KafkaPartitioner<IN> customPartitioner) {
@@ -45,7 +45,6 @@
 import org.apache.flink.util.SerializedValue;

 import org.apache.commons.collections.map.LinkedMap;
-
 import org.junit.Assert;
 import org.junit.Test;
 import org.mockito.Matchers;
@@ -25,7 +25,6 @@

 import org.apache.avro.Schema;
 import org.apache.avro.specific.SpecificRecordBase;
-
 import org.junit.Test;

 import java.sql.Timestamp;
@@ -28,7 +28,6 @@
 import org.apache.flink.streaming.util.serialization.KeyedSerializationSchema;

 import kafka.server.KafkaServer;
-
 import org.apache.kafka.clients.consumer.ConsumerRecord;

 import java.util.ArrayList;
@@ -36,7 +36,6 @@
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,7 +48,6 @@
 import com.amazonaws.services.kinesis.model.HashKeyRange;
 import com.amazonaws.services.kinesis.model.SequenceNumberRange;
 import com.amazonaws.services.kinesis.model.Shard;
-
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -34,9 +34,7 @@
 import com.amazonaws.services.kinesis.producer.KinesisProducer;
 import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration;
 import com.amazonaws.services.kinesis.producer.UserRecordResult;
-
 import com.google.common.util.concurrent.SettableFuture;
-
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -22,7 +22,6 @@
 import org.apache.flink.streaming.connectors.kinesis.config.ProducerConfigConstants;

 import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration;
-
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -77,7 +77,7 @@ public void testRocksDbMergePerformance() throws Exception {
 	.setCreateIfMissing(true)
 	.setMergeOperatorName(RocksDBKeyedStateBackend.MERGE_OPERATOR_NAME);

-	final WriteOptions write_options = new WriteOptions()
+	final WriteOptions writeOptions = new WriteOptions()
 		.setSync(false)
 		.setDisableWAL(true);
@@ -88,7 +88,7 @@

 	final long beginInsert = System.nanoTime();
 	for (int i = 0; i < num; i++) {
-		rocksDB.merge(write_options, keyBytes, valueBytes);
+		rocksDB.merge(writeOptions, keyBytes, valueBytes);
 	}
 	final long endInsert = System.nanoTime();
 	log.info("end insert - duration: {} ms", (endInsert - beginInsert) / 1_000_000);
@@ -154,7 +154,7 @@ public void testRocksDbRangeGetPerformance() throws Exception {
 	.setCreateIfMissing(true)
 	.setMergeOperatorName(RocksDBKeyedStateBackend.MERGE_OPERATOR_NAME);

-	final WriteOptions write_options = new WriteOptions()
+	final WriteOptions writeOptions = new WriteOptions()
 		.setSync(false)
 		.setDisableWAL(true);
@@ -170,7 +170,7 @@

 	final long beginInsert = System.nanoTime();
 	for (int i = 0; i < num; i++) {
 		unsafe.putInt(keyTemplate, offset, i);
-		rocksDB.put(write_options, keyTemplate, valueBytes);
+		rocksDB.put(writeOptions, keyTemplate, valueBytes);
 	}
 	final long endInsert = System.nanoTime();
 	log.info("end insert - duration: {} ms", (endInsert - beginInsert) / 1_000_000);
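Renaming write_options to writeOptions brings the local variable in line with Java's lowerCamelCase convention, presumably flagged by Checkstyle's LocalVariableName check. A sketch of the module that enforces this rule, assuming the stock check with its default pattern (Flink's actual configuration may differ):

	<module name="LocalVariableName">
		<!-- lowerCamelCase: a lowercase letter followed by letters and digits -->
		<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
	</module>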
2 changes: 1 addition & 1 deletion flink-core/pom.xml
@@ -168,7 +168,7 @@ under the License.
 <dependency>
 	<groupId>com.puppycrawl.tools</groupId>
 	<artifactId>checkstyle</artifactId>
-	<version>6.19</version>
+	<version>8.4</version>
 </dependency>
 </dependencies>
 <executions>
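This one-line pom change, repeated in flink-optimizer, flink-runtime, and the root pom below, pins the Checkstyle artifact that maven-checkstyle-plugin runs with, overriding the older version the plugin bundles by default. A sketch of the enclosing plugin block such a <dependency> override typically sits in; the surrounding configuration is not shown in this diff:

	<plugin>
		<groupId>org.apache.maven.plugins</groupId>
		<artifactId>maven-checkstyle-plugin</artifactId>
		<dependencies>
			<!-- run the plugin with Checkstyle 8.4 instead of its bundled default -->
			<dependency>
				<groupId>com.puppycrawl.tools</groupId>
				<artifactId>checkstyle</artifactId>
				<version>8.4</version>
			</dependency>
		</dependencies>
	</plugin>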
@@ -23,7 +23,6 @@
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.PositionedReadable;
 import org.apache.hadoop.fs.Seekable;
-
 import org.junit.Assert;
 import org.junit.Test;

@@ -25,7 +25,6 @@
 import org.apache.flink.runtime.util.HadoopUtils;

 import org.apache.hadoop.fs.s3a.S3AFileSystem;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -25,7 +25,6 @@
 import org.apache.flink.runtime.util.HadoopUtils;

 import com.facebook.presto.hive.PrestoS3FileSystem;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -25,9 +25,7 @@
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.AWSStaticCredentialsProvider;
 import com.amazonaws.services.s3.AmazonS3Client;
-
 import com.facebook.presto.hive.PrestoS3FileSystem;
-
 import org.junit.Test;

 import java.lang.reflect.Field;
@@ -32,12 +32,10 @@
 import org.apache.flink.util.Preconditions;

 import com.esotericsoftware.kryo.Kryo;
-
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.reflect.ReflectDatumReader;
 import org.apache.avro.reflect.ReflectDatumWriter;
 import org.apache.avro.util.Utf8;
-
 import org.objenesis.strategy.StdInstantiatorStrategy;

 import java.io.IOException;
@@ -23,15 +23,16 @@
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.Utils;
 import org.apache.flink.api.java.operators.DataSource;
-//CHECKSTYLE.OFF: AvoidStarImport|ImportOrder
-import org.apache.flink.api.java.tuple.*;
-//CHECKSTYLE.ON: AvoidStarImport|ImportOrder
 import org.apache.flink.api.java.typeutils.PojoTypeInfo;
 import org.apache.flink.api.java.typeutils.TupleTypeInfo;
 import org.apache.flink.api.java.typeutils.TypeExtractor;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.util.Preconditions;

+//CHECKSTYLE.OFF: AvoidStarImport|ImportOrder
+import org.apache.flink.api.java.tuple.*;
+//CHECKSTYLE.ON: AvoidStarImport|ImportOrder
+
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -28,7 +28,6 @@
 import org.apache.flink.util.FlinkException;

 import org.apache.mesos.Protos;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

2 changes: 1 addition & 1 deletion flink-optimizer/pom.xml
@@ -87,7 +87,7 @@ under the License.
 <dependency>
 	<groupId>com.puppycrawl.tools</groupId>
 	<artifactId>checkstyle</artifactId>
-	<version>6.19</version>
+	<version>8.4</version>
 </dependency>
 </dependencies>
 <executions>
2 changes: 1 addition & 1 deletion flink-runtime/pom.xml
@@ -264,7 +264,7 @@ under the License.
 <dependency>
 	<groupId>com.puppycrawl.tools</groupId>
 	<artifactId>checkstyle</artifactId>
-	<version>6.19</version>
+	<version>8.4</version>
 </dependency>
 </dependencies>
 <executions>
@@ -23,7 +23,6 @@

 import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.Registration;
-
 import org.junit.Test;

 import java.io.BufferedReader;
@@ -30,7 +30,6 @@
 import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.channel.socket.ClientSocketChannelFactory;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -80,7 +80,6 @@
 import akka.actor.ActorRef;
 import akka.actor.ActorSystem;
 import akka.testkit.JavaTestKit;
-
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -23,7 +23,6 @@
 import org.apache.flink.util.TestLogger;

 import org.junit.Test;
-
 import org.reflections.Reflections;

 import java.lang.reflect.Modifier;
2 changes: 1 addition & 1 deletion pom.xml
@@ -1061,7 +1061,7 @@ under the License.
 <dependency>
 	<groupId>com.puppycrawl.tools</groupId>
 	<artifactId>checkstyle</artifactId>
-	<version>6.19</version>
+	<version>8.4</version>
 </dependency>
 </dependencies>
 <executions>
32 changes: 15 additions & 17 deletions tools/maven/checkstyle.xml
@@ -64,12 +64,6 @@ This file is based on the checkstyle file of Apache Beam.
 <!--<property name="fileNamePattern" value=".*Tests\.java$" />-->
 <!--</module>-->

-<!-- Allow use of comment to suppress javadocstyle -->
-<module name="SuppressionCommentFilter">
-	<property name="offCommentFormat" value="CHECKSTYLE.OFF\: ([\w\|]+)"/>
-	<property name="onCommentFormat" value="CHECKSTYLE.ON\: ([\w\|]+)"/>
-	<property name="checkFormat" value="$1"/>
-</module>
 <module name="SuppressionFilter">
 	<property name="file" value="${checkstyle.suppressions.file}" default="suppressions.xml" />
 </module>
@@ -90,6 +84,13 @@
 <!-- All Java AST specific tests live under TreeWalker module. -->
 <module name="TreeWalker">

+	<!-- Allow use of comment to suppress javadocstyle -->
+	<module name="SuppressionCommentFilter">
+		<property name="offCommentFormat" value="CHECKSTYLE.OFF\: ([\w\|]+)"/>
+		<property name="onCommentFormat" value="CHECKSTYLE.ON\: ([\w\|]+)"/>
+		<property name="checkFormat" value="$1"/>
+	</module>
+
 <!--
 FLINK CUSTOM CHECKS
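This relocation is what the upgrade requires: in the Checkstyle 8 line, SuppressionCommentFilter runs as a child of TreeWalker instead of at the top level, and the FileContentsHolder module it used to depend on was removed (its deletion appears at the end of this file's diff). Usage in source files is unchanged; the filter is driven by paired comments, as in the star-import hunk earlier in this commit:

	//CHECKSTYLE.OFF: AvoidStarImport
	import org.apache.flink.api.java.tuple.*;
	//CHECKSTYLE.ON: AvoidStarImport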
@@ -263,8 +264,8 @@
 <property name="allowMissingThrowsTags" value="true"/>
 <property name="allowThrowsTagsForSubclasses" value="true"/>
 <property name="allowUndeclaredRTE" value="true"/>
-<!-- This check sometimes failed with "Unable to get class information for @throws tag" for custom exceptions -->
-<property name="suppressLoadErrors" value="true"/>
+<!-- This check sometimes failed with "Unable to get class information for @throws tag" for custom exceptions -->
+<property name="suppressLoadErrors" value="true"/>
 </module>
(The removed and added lines here, and in the EmptyLineSeparator hunk below, are identical in text; the changes are indentation-only, which this view does not preserve.)

<!-- Check that paragraph tags are used correctly in Javadoc. -->
@@ -484,15 +485,15 @@
 -->

 <module name="EmptyLineSeparator">
-	<!-- Checks for empty line separator between tokens. The only
+	<!-- Checks for empty line separator between tokens. The only
 		excluded token is VARIABLE_DEF, allowing class fields to
 		be declared on consecutive lines.
 	-->
-	<property name="allowMultipleEmptyLines" value="false"/>
-	<property name="allowMultipleEmptyLinesInsideClassMembers" value="false"/>
-	<property name="tokens" value="PACKAGE_DEF, IMPORT, CLASS_DEF,
-		INTERFACE_DEF, ENUM_DEF, STATIC_INIT, INSTANCE_INIT, METHOD_DEF,
-		CTOR_DEF"/>
+	<property name="allowMultipleEmptyLines" value="false"/>
+	<property name="allowMultipleEmptyLinesInsideClassMembers" value="false"/>
+	<property name="tokens" value="PACKAGE_DEF, IMPORT, CLASS_DEF,
+		INTERFACE_DEF, ENUM_DEF, STATIC_INIT, INSTANCE_INIT, METHOD_DEF,
+		CTOR_DEF"/>
 </module>
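A short sketch of what this configuration accepts and rejects in a hypothetical class: consecutive field declarations need no separating blank line because VARIABLE_DEF is left out of the token list, while allowMultipleEmptyLines=false forbids stacking blank lines before the tokens that are checked:

	public class Example {

		private int counter;
		private int total; // fine: VARIABLE_DEF is not checked

		public void reset() { // fine: a single blank line before a METHOD_DEF
			counter = 0;
		}


		public void clear() { // violation: two consecutive blank lines
			total = 0;
		}
	}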

<module name="WhitespaceAround">
@@ -559,9 +560,6 @@
 <property name="severity" value="error"/>
 </module>

-<!-- Required to support SuppressWarningsComment -->
-<module name="FileContentsHolder"/>
-
 </module>
</module>
