This is an automated email from the ASF dual-hosted git repository.
cgivre pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git
The following commit(s) were added to refs/heads/master by this push:
new 18110e0 DRILL-8039: More LGTM Code Cleanup (#2365)
18110e0 is described below
commit 18110e0a8f6e9b1511b31f4458068ea38d8c5731
Author: Charles S. Givre <[email protected]>
AuthorDate: Tue Nov 9 09:35:53 2021 -0500
DRILL-8039: More LGTM Code Cleanup (#2365)
* Initial Commit
* Fixed Spurious Javadocs
---
.../java/org/apache/drill/exec/store/hdf5/HDF5BatchReader.java | 2 +-
.../apache/drill/exec/store/hdf5/writers/HDF5ByteDataWriter.java | 2 +-
.../drill/exec/store/hdf5/writers/HDF5DoubleDataWriter.java | 2 +-
.../apache/drill/exec/store/hdf5/writers/HDF5FloatDataWriter.java | 2 +-
.../apache/drill/exec/store/hdf5/writers/HDF5IntDataWriter.java | 2 +-
.../apache/drill/exec/store/hdf5/writers/HDF5LongDataWriter.java | 2 +-
.../drill/exec/store/hdf5/writers/HDF5SmallIntDataWriter.java | 2 +-
.../drill/exec/store/hdf5/writers/HDF5TimestampDataWriter.java | 2 +-
.../java/org/apache/drill/exec/store/mapr/db/MapRDBGroupScan.java | 2 +-
.../apache/drill/exec/store/mapr/db/json/JsonTableGroupScan.java | 2 +-
.../main/java/org/apache/drill/exec/store/xml/XMLBatchReader.java | 2 +-
.../main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java | 3 ++-
.../apache/drill/exec/store/kafka/decoders/AvroMessageReader.java | 2 +-
.../src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java | 6 +++---
.../src/main/java/org/apache/drill/exec/client/DrillClient.java | 2 +-
.../drill/exec/expr/fn/impl/CollectListMapsAggFunction.java | 2 +-
.../org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java | 8 ++++----
.../java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java | 4 ++--
.../java/org/apache/drill/exec/physical/impl/join/JoinUtils.java | 2 +-
.../apache/drill/exec/physical/impl/validate/BatchValidator.java | 2 +-
.../exec/planner/fragment/contrib/SplittingParallelizer.java | 2 +-
.../drill/exec/planner/index/rules/DbScanSortRemovalRule.java | 4 ++--
.../src/main/java/org/apache/drill/exec/rpc/BitRpcUtility.java | 2 +-
.../main/java/org/apache/drill/exec/rpc/control/Controller.java | 5 -----
.../main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java | 4 ++--
.../src/main/java/org/apache/drill/exec/rpc/user/UserServer.java | 2 +-
.../java/org/apache/drill/exec/store/avro/AvroBatchReader.java | 6 +++---
.../drill/exec/store/easy/json/JsonStatisticsRecordWriter.java | 4 ++--
.../apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java | 2 +-
.../main/java/org/apache/drill/exec/store/log/LogBatchReader.java | 4 ++--
.../drill/exec/store/sys/store/ZookeeperPersistentStore.java | 2 +-
.../org/apache/drill/exec/work/batch/ControlMessageHandler.java | 2 +-
.../src/main/java/org/apache/drill/jdbc/impl/DrillCursor.java | 2 +-
.../org/apache/drill/exec/memory/AllocatorClosedException.java | 2 --
exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java | 2 +-
exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java | 2 +-
.../java/org/apache/drill/exec/rpc/SaslEncryptionHandler.java | 6 +++---
37 files changed, 50 insertions(+), 56 deletions(-)
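
For orientation before the diff: most hunks below fix one recurring class of bug flagged by LGTM. SLF4J logger calls substitute {} placeholders, while String.format, assertions, and formatted exception messages use %s specifiers; mixing the two leaves the literal placeholder in the output and silently drops the argument. The following is a minimal sketch of the distinction (illustrative only, not part of the patch; the class name and path value are hypothetical):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class PlaceholderDemo {
    private static final Logger logger = LoggerFactory.getLogger(PlaceholderDemo.class);

    public static void main(String[] args) {
      String path = "/tmp/example.json"; // hypothetical value, for illustration
      // SLF4J substitutes {} markers; a %s here would be printed literally.
      logger.error("Failed to open input file: {}", path);
      // String.format substitutes %s specifiers; a {} here would be printed literally.
      String msg = String.format("Failed to open input file: %s", path);
      System.out.println(msg);
    }
  }

The remaining hunks suppress known-safe alerts in place with trailing // lgtm [query-id] comments rather than changing behavior.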
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5BatchReader.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5BatchReader.java
index 6979dbb..75c6f6d 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5BatchReader.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5BatchReader.java
@@ -622,7 +622,7 @@ public class HDF5BatchReader implements ManagedReader<FileSchemaNegotiator> {
} catch (Exception e) {
throw UserException
.dataReadError()
- .message("Error writing Compound Field: {}", e.getMessage())
+ .message("Error writing Compound Field: " + e.getMessage())
.addContext(errorContext)
.build(logger);
}
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5ByteDataWriter.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5ByteDataWriter.java
index dabc6ab..d66d6dd 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5ByteDataWriter.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5ByteDataWriter.java
@@ -68,7 +68,7 @@ public class HDF5ByteDataWriter extends HDF5DataWriter {
if (counter > data.length) {
return false;
} else {
- colWriter.setInt(data[counter++]);
+ colWriter.setInt(data[counter++]); //lgtm [java/index-out-of-bounds]
return true;
}
}
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5DoubleDataWriter.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5DoubleDataWriter.java
index b7c1c90..93b55c4 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5DoubleDataWriter.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5DoubleDataWriter.java
@@ -66,7 +66,7 @@ public class HDF5DoubleDataWriter extends HDF5DataWriter {
if (counter > data.length) {
return false;
} else {
- colWriter.setDouble(data[counter++]);
+ colWriter.setDouble(data[counter++]); // lgtm [java/index-out-of-bounds]
return true;
}
}
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5FloatDataWriter.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5FloatDataWriter.java
index 335c39d..418db01 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5FloatDataWriter.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5FloatDataWriter.java
@@ -66,7 +66,7 @@ public class HDF5FloatDataWriter extends HDF5DataWriter {
if (counter > data.length) {
return false;
} else {
- colWriter.setDouble(data[counter++]);
+ colWriter.setDouble(data[counter++]); // lgtm [java/index-out-of-bounds]
return true;
}
}
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5IntDataWriter.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5IntDataWriter.java
index ba49a3c..ef0de85 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5IntDataWriter.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5IntDataWriter.java
@@ -67,7 +67,7 @@ public class HDF5IntDataWriter extends HDF5DataWriter {
if (counter > data.length) {
return false;
} else {
- colWriter.setInt(data[counter++]);
+ colWriter.setInt(data[counter++]); // lgtm [java/index-out-of-bounds]
return true;
}
}
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5LongDataWriter.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5LongDataWriter.java
index fadb290..91d218d 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5LongDataWriter.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5LongDataWriter.java
@@ -66,7 +66,7 @@ public class HDF5LongDataWriter extends HDF5DataWriter {
if (counter > data.length) {
return false;
} else {
- colWriter.setLong(data[counter++]);
+ colWriter.setLong(data[counter++]); // lgtm [java/index-out-of-bounds]
return true;
}
}
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5SmallIntDataWriter.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5SmallIntDataWriter.java
index fbda008..2f999bd 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5SmallIntDataWriter.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5SmallIntDataWriter.java
@@ -68,7 +68,7 @@ public class HDF5SmallIntDataWriter extends HDF5DataWriter {
if (counter > data.length) {
return false;
} else {
- colWriter.setInt(data[counter++]);
+ colWriter.setInt(data[counter++]); // lgtm [java/index-out-of-bounds]
return true;
}
}
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5TimestampDataWriter.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5TimestampDataWriter.java
index 9517028..bdec31a 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5TimestampDataWriter.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/writers/HDF5TimestampDataWriter.java
@@ -45,7 +45,7 @@ public class HDF5TimestampDataWriter extends HDF5DataWriter {
if (counter > data.length) {
return false;
} else {
- colWriter.setTimestamp(Instant.ofEpochMilli(data[counter++]));
+ colWriter.setTimestamp(Instant.ofEpochMilli(data[counter++])); // lgtm [java/index-out-of-bounds]
return true;
}
}
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBGroupScan.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBGroupScan.java
index 1f0b626..126326e 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBGroupScan.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBGroupScan.java
@@ -251,7 +251,7 @@ public abstract class MapRDBGroupScan extends AbstractDbGroupScan {
/* no slot should be empty at this point */
assert (minHeap.peek() == null || minHeap.peek().size() > 0) : String.format(
- "Unable to assign tasks to some endpoints.\nEndpoints: {}.\nAssignment Map: {}.",
+ "Unable to assign tasks to some endpoints.\nEndpoints: %s.\nAssignment Map: %s.",
incomingEndpoints, endpointFragmentMapping.toString());
logger.debug("Built assignment map in {} µs.\nEndpoints: {}.\nAssignment Map: {}",
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonTableGroupScan.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonTableGroupScan.java
index 78c2012..67cf7ab 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonTableGroupScan.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/JsonTableGroupScan.java
@@ -517,7 +517,7 @@ public class JsonTableGroupScan extends MapRDBGroupScan implements IndexGroupSca
}
logger.debug("index_plan_info: getEstimatedRowCount obtained from DB Client for {}: indexName: {}, indexInfo: {}, " +
"avgRowSize: {}, estimatedSize {}", this, (indexDesc == null ? "null" : indexDesc.getIndexName()),
- (indexDesc == null ? "null" : indexDesc.getIndexInfo()), avgRowSize);
+ (indexDesc == null ? "null" : indexDesc.getIndexInfo()), avgRowSize, fullTableEstimatedSize);
return new MapRDBStatisticsPayload(ROWCOUNT_UNKNOWN, ROWCOUNT_UNKNOWN, avgRowSize);
}
diff --git a/contrib/format-xml/src/main/java/org/apache/drill/exec/store/xml/XMLBatchReader.java b/contrib/format-xml/src/main/java/org/apache/drill/exec/store/xml/XMLBatchReader.java
index 1ed50b8..efe1c0d 100644
--- a/contrib/format-xml/src/main/java/org/apache/drill/exec/store/xml/XMLBatchReader.java
+++ b/contrib/format-xml/src/main/java/org/apache/drill/exec/store/xml/XMLBatchReader.java
@@ -91,7 +91,7 @@ public class XMLBatchReader implements ManagedReader<FileSchemaNegotiator> {
} catch (Exception e) {
throw UserException
.dataReadError(e)
- .message("Failed to open input file: {}", split.getPath().toString())
+ .message(String.format("Failed to open input file: %s", split.getPath().toString()))
.addContext(errorContext)
.addContext(e.getMessage())
.build(logger);
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
index a45efbf..89e1623 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.store.hbase;
import java.io.IOException;
import java.nio.charset.CharacterCodingException;
+import java.util.Arrays;
import java.util.List;
import org.apache.drill.common.exceptions.DrillRuntimeException;
@@ -76,7 +77,7 @@ public class HBaseUtils {
FilterProtos.Filter pbFilter = FilterProtos.Filter.parseFrom(filterBytes);
return ProtobufUtil.toFilter(pbFilter);
} catch (Exception e) {
- throw new DrillRuntimeException("Error deserializing filter: " + filterBytes, e);
+ throw new DrillRuntimeException("Error deserializing filter: " + Arrays.toString(filterBytes), e);
}
}
diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/decoders/AvroMessageReader.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/decoders/AvroMessageReader.java
index 8179226..8413815 100644
--- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/decoders/AvroMessageReader.java
+++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/decoders/AvroMessageReader.java
@@ -79,7 +79,7 @@ public class AvroMessageReader implements MessageReader {
if (Schema.Type.RECORD != schema.getType()) {
throw UserException.dataReadError()
- .message("Root object must be record type. Found: %s",
schema.getType())
+ .message(String.format("Root object must be record type. Found: %s",
schema.getType()))
.addContext("Reader", this)
.build(logger);
}
diff --git a/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java
index f914fb9..e704089 100644
--- a/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java
@@ -285,7 +285,7 @@ public class CryptoFunctions {
keyByteArray = java.util.Arrays.copyOf(keyByteArray, 16);
javax.crypto.spec.SecretKeySpec secretKey = new javax.crypto.spec.SecretKeySpec(keyByteArray, "AES");
- javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance("AES/ECB/PKCS5Padding");
+ javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance("AES/ECB/PKCS5Padding"); // lgtm [java/weak-cryptographic-algorithm]
cipher.init(javax.crypto.Cipher.ENCRYPT_MODE, secretKey);
encryptedText = javax.xml.bind.DatatypeConverter.printBase64Binary(cipher.doFinal(input.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
} catch (Exception e) {
@@ -336,7 +336,7 @@ public class CryptoFunctions {
keyByteArray = java.util.Arrays.copyOf(keyByteArray, 16);
javax.crypto.spec.SecretKeySpec secretKey = new javax.crypto.spec.SecretKeySpec(keyByteArray, "AES");
- javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance("AES/ECB/PKCS5Padding");
+ javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance("AES/ECB/PKCS5Padding"); // lgtm [java/weak-cryptographic-algorithm]
cipher.init(javax.crypto.Cipher.DECRYPT_MODE, secretKey);
decryptedText = new String(cipher.doFinal(javax.xml.bind.DatatypeConverter.parseBase64Binary(input)));
} catch (Exception e) {
@@ -351,4 +351,4 @@ public class CryptoFunctions {
}
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
index e5562dd..49375c9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
@@ -620,7 +620,7 @@ public class DrillClient implements Closeable, ConnectionThrottle {
try {
jsonArray.add(objectMapper.readTree(fragment.getFragmentJson()));
} catch (IOException e) {
- logger.error("Exception while trying to read PlanFragment JSON for
%s", fragment.getHandle().getQueryId(), e);
+ logger.error("Exception while trying to read PlanFragment JSON for
{}", fragment.getHandle().getQueryId(), e);
throw new RpcException(e);
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CollectListMapsAggFunction.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CollectListMapsAggFunction.java
index c232edc..f88f0e4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CollectListMapsAggFunction.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CollectListMapsAggFunction.java
@@ -60,7 +60,7 @@ public class CollectListMapsAggFunction implements DrillAggFunc {
for (int i = 0; i < inputs.length; i += 2) {
org.apache.drill.exec.vector.complex.MapUtility.writeToMapFromReader(
- inputs[i + 1], mapWriter, inputs[i].readText().toString(), "CollectListMapsAggFunction");
+ inputs[i + 1], mapWriter, inputs[i].readText().toString(), "CollectListMapsAggFunction"); // lgtm [java/index-out-of-bounds]
}
mapWriter.end();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
index 523869a..b639de8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
@@ -466,10 +466,10 @@ public class DateTypeFunctions {
@Override
public void eval() {
- if (1 == 1) {
- throw new UnsupportedOperationException(
- "date_part function should be rewritten as extract() functions");
- }
+ // TODO: We should fix this so that the function is rewritten for the user rather than
+ // throwing an exception. This is poor design.
+ throw new UnsupportedOperationException(
+ "date_part function should be rewritten as extract() functions");
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java
index 24f0bc4..7bce81b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SchemaFunctions.java
@@ -68,7 +68,7 @@ public class SchemaFunctions {
// Janino cannot infer type
org.apache.drill.exec.record.MaterializedField materializedField =
(org.apache.drill.exec.record.MaterializedField) columns.get(columnName);
- org.apache.drill.common.types.TypeProtos.MajorType type = inputs[i + 1].getType();
+ org.apache.drill.common.types.TypeProtos.MajorType type = inputs[i + 1].getType(); // lgtm [java/index-out-of-bounds]
if (materializedField != null && !materializedField.getType().equals(type)) {
org.apache.drill.common.types.TypeProtos.MinorType leastRestrictiveType =
org.apache.drill.exec.resolver.TypeCastRules.getLeastRestrictiveType(
@@ -85,7 +85,7 @@ public class SchemaFunctions {
columns.put(columnName, clone);
} else {
if (type.getMinorType() == org.apache.drill.common.types.TypeProtos.MinorType.MAP) {
- columns.put(columnName, inputs[i + 1].getField());
+ columns.put(columnName, inputs[i + 1].getField()); // lgtm [java/index-out-of-bounds]
} else {
columns.put(columnName, org.apache.drill.exec.record.MaterializedField.create(columnName, type));
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
index f3e8dc0..fc5ca52 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
@@ -85,7 +85,7 @@ public class JoinUtils {
return Comparator.IS_NOT_DISTINCT_FROM;
}
throw UserException.unsupportedError()
- .message("Invalid comparator supplied to this join: ",
condition.getRelationship())
+ .message("Invalid comparator supplied to this join: " +
condition.getRelationship())
.build(logger);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
index 5134f42..b79c089 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
@@ -462,7 +462,7 @@ public class BatchValidator {
for (MinorType type : vector.getSubTypes()) {
if (type == MinorType.LATE) {
error(name, vector, String.format(
- "Union vector includes illegal type LATE",
+ "Union vector includes illegal type LATE %s",
type.name()));
continue;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/contrib/SplittingParallelizer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/contrib/SplittingParallelizer.java
index cd11281..11e0a27 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/contrib/SplittingParallelizer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/fragment/contrib/SplittingParallelizer.java
@@ -222,7 +222,7 @@ public class SplittingParallelizer extends DefaultQueryParallelizer {
if (logger.isDebugEnabled()) {
logger.debug("Remote fragment:\n {}",
DrillStringUtils.unescapeJava(fragment.toString()));
}
- throw new ForemanSetupException(String.format("There should not be
non-root/remote fragment present in plan split, but there is:",
+ throw new ForemanSetupException(String.format("There should not be
non-root/remote fragment present in plan split, but there is: %s",
DrillStringUtils.unescapeJava(fragment.toString())));
}
// fragments should be always empty here
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/rules/DbScanSortRemovalRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/rules/DbScanSortRemovalRule.java
index 42c525b..dc4054c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/rules/DbScanSortRemovalRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/index/rules/DbScanSortRemovalRule.java
@@ -197,7 +197,7 @@ public class DbScanSortRemovalRule extends Prule {
if (planGen.convertChild() != null) {
indexContext.getCall().transformTo(planGen.convertChild());
} else {
- logger.debug("Not able to generate index plan in ",
this.getClass().toString());
+ logger.debug("Not able to generate index plan in {}",
this.getClass().toString());
}
} catch (Exception e) {
logger.warn("Exception while trying to generate indexscan to remove
sort", e);
@@ -221,7 +221,7 @@ public class DbScanSortRemovalRule extends Prule {
indexContext.exch != null);
if (finalRel == null) {
- logger.debug("Not able to generate index plan in ",
this.getClass().toString());
+ logger.debug("Not able to generate index plan in {}",
this.getClass().toString());
return;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BitRpcUtility.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BitRpcUtility.java
index 57f39fe..58bdd9b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BitRpcUtility.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BitRpcUtility.java
@@ -96,7 +96,7 @@ public final class BitRpcUtility {
client.startSaslHandshake(connectionHandler, config.getSaslClientProperties(endpoint, saslProperties),
ugi, factory, saslRpcType);
} catch (final IOException e) {
- logger.error("Failed while doing setup for starting sasl handshake for connection", connection.getName());
+ logger.error("Failed while doing setup for starting sasl handshake for connection {}", connection.getName());
final Exception ex = new RpcException(String.format("Failed to initiate authentication to %s",
endpoint.getAddress()), e);
connectionHandler.connectionFailed(RpcConnectionHandler.FailureType.AUTHENTICATION, ex);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java
index 90de9af..040dd3a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/Controller.java
@@ -129,11 +129,6 @@ public interface Controller extends AutoCloseable {
/**
* Interface for defining how to serialize and deserialize custom message for consumer who want to use something other
* than Protobuf messages.
- *
- * @param <SEND>
- * The class that is expected to be sent.
- * @param <RECEIVE>
- * The class that is expected to received.
*/
public interface CustomSerDe<MSG> {
public byte[] serializeToSend(MSG send);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java
index 7008f73..a05ceb0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/control/WorkEventBus.java
@@ -71,7 +71,7 @@ public class WorkEventBus {
final FragmentManager old = managers.putIfAbsent(fragmentManager.getHandle(), fragmentManager);
if (old != null) {
throw new IllegalStateException(
- String.format("Manager {} for fragment {} already exists.", old,
QueryIdHelper.getQueryIdentifier(fragmentManager.getHandle())));
+ String.format("Manager %s for fragment %s already exists.", old,
QueryIdHelper.getQueryIdentifier(fragmentManager.getHandle())));
}
}
@@ -90,7 +90,7 @@ public class WorkEventBus {
public boolean removeFragmentManager(final FragmentHandle handle, final boolean cancel) {
final FragmentManager manager = managers.remove(handle);
if (manager != null) {
- assert !manager.isCancelled() : String.format("Fragment {} manager {} is already cancelled.", QueryIdHelper.getQueryIdentifier(handle), manager);
+ assert !manager.isCancelled() : String.format("Fragment %s manager %s is already cancelled.", QueryIdHelper.getQueryIdentifier(handle), manager);
if (cancel) {
manager.cancel();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
index 892636c..ce65218 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
@@ -366,7 +366,7 @@ public class UserServer extends BasicServer<RpcType, BitToUserConnection> {
// if timeout is unsupported or is set to false, disable timeout.
if (!inbound.hasSupportTimeout() || !inbound.getSupportTimeout()) {
connection.disableReadTimeout();
- logger.warn("Timeout Disabled as client doesn't support it.",
connection.getName());
+ logger.warn("Timeout Disabled as client {} doesn't support it.",
connection.getName());
}
BitToUserHandshake.Builder respBuilder = BitToUserHandshake.newBuilder()
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroBatchReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroBatchReader.java
index ec47678..ca587ae 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroBatchReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroBatchReader.java
@@ -149,7 +149,7 @@ public class AvroBatchReader implements ManagedReader<FileScanFramework.FileSche
} catch (IOException | InterruptedException e) {
throw UserException.dataReadError(e)
.message("Error preparing Avro reader")
- .addContext("Reader", this)
+ .addContext(String.format("Reader: %s", this))
.build(logger);
}
}
@@ -166,7 +166,7 @@ public class AvroBatchReader implements ManagedReader<FileScanFramework.FileSche
record = reader.next(record);
} catch (IOException e) {
throw UserException.dataReadError(e)
- .addContext("Reader", this)
+ .addContext(String.format("Reader %s", this))
.build(logger);
}
@@ -175,7 +175,7 @@ public class AvroBatchReader implements ManagedReader<FileScanFramework.FileSche
if (Schema.Type.RECORD != schema.getType()) {
throw UserException.dataReadError()
.message("Root object must be record type. Found: %s",
schema.getType())
- .addContext("Reader", this)
+ .addContext(String.format("Reader %s", this))
.build(logger);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonStatisticsRecordWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonStatisticsRecordWriter.java
index 8858e34..0aef09a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonStatisticsRecordWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonStatisticsRecordWriter.java
@@ -72,7 +72,7 @@ public class JsonStatisticsRecordWriter extends JSONBaseStatisticsRecordWriter i
}
} catch (IOException ex) {
throw UserException.dataWriteError(ex)
- .addContext("Unable to delete tmp statistics file", fileName)
+ .addContext(String.format("Unable to delete tmp statistics file %s",
fileName))
.build(logger);
}
try {
@@ -84,7 +84,7 @@ public class JsonStatisticsRecordWriter extends JSONBaseStatisticsRecordWriter i
logger.debug("Created file: {}", fileName);
} catch (IOException ex) {
throw UserException.dataWriteError(ex)
- .addContext("Unable to create stistics file", fileName)
+ .addContext(String.format("Unable to create stistics file %s",
fileName))
.build(logger);
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java
index 586e7bb..b6df9ae 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java
@@ -399,7 +399,7 @@ public class JsonLoaderImpl implements JsonLoader, ErrorFactory {
} else {
throw buildError(
UserException.dataReadError()
- .message("Syntax error on token", token.toString()));
+ .message("Syntax error on token: " + token.toString()));
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogBatchReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogBatchReader.java
index b808ffe..1fd1ac3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogBatchReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogBatchReader.java
@@ -196,7 +196,7 @@ public class LogBatchReader implements ManagedReader<FileSchemaNegotiator> {
throw UserException
.dataReadError(e)
.message("Failed to open input file")
- .addContext("File path:", split.getPath())
+ .addContext(String.format("File path: %s", split.getPath()))
.addContext(loader.errorContext())
.build(logger);
}
@@ -226,7 +226,7 @@ public class LogBatchReader implements ManagedReader<FileSchemaNegotiator> {
throw UserException
.dataReadError(e)
.message("Error reading file")
- .addContext("File", split.getPath())
+ .addContext(String.format("File: %s", split.getPath()))
.addContext(loader.errorContext())
.build(logger);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/ZookeeperPersistentStore.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/ZookeeperPersistentStore.java
index c3c993c..9af6a7f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/ZookeeperPersistentStore.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/ZookeeperPersistentStore.java
@@ -151,7 +151,7 @@ public class ZookeeperPersistentStore<V> extends BasePersistentStore<V> implemen
try{
client.close();
} catch(final Exception e) {
- logger.warn("Failure while closing out %s.", getClass().getSimpleName(),
e);
+ logger.warn("Failure while closing out {}: {}",
getClass().getSimpleName(), e);
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlMessageHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlMessageHandler.java
index 963f53a..bee1676 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlMessageHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/batch/ControlMessageHandler.java
@@ -142,7 +142,7 @@ public class ControlMessageHandler implements RequestHandler<ControlConnection>
*/
public void startNewFragment(final PlanFragment fragment, final DrillbitContext drillbitContext)
throws UserRpcException {
- logger.debug("Received remote fragment start instruction", fragment);
+ logger.debug("Received remote fragment start instruction: {}", fragment);
try {
final FragmentContextImpl fragmentContext = new FragmentContextImpl(drillbitContext, fragment,
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillCursor.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillCursor.java
index 6a6aaf8..bdadb8f 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillCursor.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/impl/DrillCursor.java
@@ -170,7 +170,7 @@ public class DrillCursor implements Cursor {
@Override
public void submissionFailed(UserException ex) {
- logger.debug("Received query failure:", instanceId, ex);
+ logger.debug("Received query failure: {} {}", instanceId, ex);
this.executionFailureException = ex;
completed = true;
close();
diff --git a/exec/memory/base/src/main/java/org/apache/drill/exec/memory/AllocatorClosedException.java b/exec/memory/base/src/main/java/org/apache/drill/exec/memory/AllocatorClosedException.java
index f07bf55..2268e4d 100644
--- a/exec/memory/base/src/main/java/org/apache/drill/exec/memory/AllocatorClosedException.java
+++ b/exec/memory/base/src/main/java/org/apache/drill/exec/memory/AllocatorClosedException.java
@@ -20,8 +20,6 @@ package org.apache.drill.exec.memory;
/**
* Exception thrown when a closed BufferAllocator is used. Note
* this is an unchecked exception.
- *
- * @param message string associated with the cause
*/
@SuppressWarnings("serial")
public class AllocatorClosedException extends RuntimeException {
diff --git a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
index 612dad6..30be9c7 100644
--- a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
+++ b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
@@ -237,7 +237,7 @@ public abstract class BasicClient<T extends EnumLite, CC extends ClientConnectio
}
connection.setSaslClient(saslClient);
} catch (final SaslException e) {
- logger.error("Failed while creating SASL client for SASL handshake for
connection", connection.getName());
+ logger.error("Failed while creating SASL client for SASL handshake for
connection: {}", connection.getName());
connectionHandler.connectionFailed(RpcConnectionHandler.FailureType.AUTHENTICATION,
e);
return;
}
diff --git a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
index 9ade2f2..944e854 100644
--- a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
+++ b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
@@ -308,7 +308,7 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
DrillPBError failure = DrillPBError.parseFrom(new ByteBufInputStream(msg.pBody, msg.pBody.readableBytes()));
connection.recordRemoteFailure(msg.coordinationId, failure);
if (RpcConstants.EXTRA_DEBUGGING) {
- logger.debug("Updated rpc future with coordinationId {} with failure ", msg.coordinationId, failure);
+ logger.debug("Updated rpc future with coordinationId {} with failure {}", msg.coordinationId, failure);
}
break;
}
diff --git a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/SaslEncryptionHandler.java b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/SaslEncryptionHandler.java
index 2587f04..0670fe7 100644
--- a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/SaslEncryptionHandler.java
+++ b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/SaslEncryptionHandler.java
@@ -125,8 +125,8 @@ class SaslEncryptionHandler extends MessageToMessageEncoder<ByteBuf> {
component.getBytes(component.readerIndex(), origMsg, 0, component.readableBytes());
//}
- if(logger.isTraceEnabled()) {
- logger.trace("Trying to encrypt chunk of size:{} with wrapSizeLimit:{} and chunkMode: {}",
+ if (logger.isTraceEnabled()) {
+ logger.trace("Trying to encrypt chunk of size:{} with wrapSizeLimit:{}",
component.readableBytes(), wrapSizeLimit);
}
@@ -168,7 +168,7 @@ class SaslEncryptionHandler extends MessageToMessageEncoder<ByteBuf> {
msg.resetReaderIndex();
outOfMemoryHandler.handle();
} catch (IOException e) {
- logger.error("Something went wrong while wrapping the message: {} with MaxRawWrapSize: {}, ChunkMode: {} " +
+ logger.error("Something went wrong while wrapping the message: {} with MaxRawWrapSize: {}, " +
"and error: {}", msg, wrapSizeLimit, e.getMessage());
throw e;
}