This is an automated email from the ASF dual-hosted git repository.

lewismc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nutch.git


The following commit(s) were added to refs/heads/master by this push:
     new f02110f  NUTCH-2633 Fix deprecation warnings when building Nutch master branch under JDK 10.0.2+13 (#374)
f02110f is described below

commit f02110f42c53e77450835776cf41f22c23f030ec
Author: Lewis John McGibbney <lewis.mcgibb...@gmail.com>
AuthorDate: Fri Aug 10 17:43:36 2018 -0700

    NUTCH-2633 Fix deprecation warnings when building Nutch master branch under JDK 10.0.2+13 (#374)
---
 .../apache/nutch/crawl/AbstractFetchSchedule.java  |  0
 .../apache/nutch/crawl/AdaptiveFetchSchedule.java  |  0
 src/java/org/apache/nutch/crawl/CrawlDatum.java    |  2 +-
 src/java/org/apache/nutch/crawl/CrawlDbMerger.java |  1 -
 src/java/org/apache/nutch/crawl/CrawlDbReader.java |  4 ---
 .../apache/nutch/crawl/DefaultFetchSchedule.java   |  0
 src/java/org/apache/nutch/crawl/FetchSchedule.java |  0
 .../apache/nutch/crawl/FetchScheduleFactory.java   |  2 +-
 .../nutch/crawl/MimeAdaptiveFetchSchedule.java     |  2 +-
 .../org/apache/nutch/crawl/SignatureFactory.java   |  2 +-
 src/java/org/apache/nutch/fetcher/Fetcher.java     |  2 +-
 src/java/org/apache/nutch/hostdb/ReadHostDb.java   |  4 +--
 .../org/apache/nutch/hostdb/ResolverThread.java    |  1 +
 src/java/org/apache/nutch/indexer/CleaningJob.java |  2 ++
 src/java/org/apache/nutch/indexer/IndexWriter.java |  3 ++
 .../org/apache/nutch/indexer/IndexingFilters.java  |  8 -----
 src/java/org/apache/nutch/plugin/Extension.java    | 10 +++++--
 src/java/org/apache/nutch/plugin/Plugin.java       |  3 +-
 src/java/org/apache/nutch/protocol/Content.java    |  0
 src/java/org/apache/nutch/protocol/Protocol.java   |  0
 .../apache/nutch/protocol/ProtocolException.java   |  0
 .../org/apache/nutch/protocol/ProtocolFactory.java |  6 ----
 .../org/apache/nutch/protocol/ProtocolStatus.java  | 34 +++++++++++-----------
 .../nutch/segment/ContentAsTextInputFormat.java    |  1 +
 .../org/apache/nutch/segment/SegmentReader.java    | 14 ++++-----
 .../org/apache/nutch/service/impl/LinkReader.java  | 22 ++++++--------
 .../org/apache/nutch/service/impl/NodeReader.java  | 22 ++++++--------
 .../service/impl/NutchServerPoolExecutor.java      |  1 +
 .../service/model/response/FetchNodeDbInfo.java    |  4 +++
 .../apache/nutch/service/resources/DbResource.java |  3 ++
 src/java/org/apache/nutch/tools/Benchmark.java     |  2 ++
 .../apache/nutch/tools/CommonCrawlDataDumper.java  |  2 +-
 .../apache/nutch/tools/CommonCrawlFormatWARC.java  |  2 --
 src/java/org/apache/nutch/tools/DmozParser.java    | 15 ++--------
 src/java/org/apache/nutch/tools/FileDumper.java    |  2 +-
 .../apache/nutch/tools/arc/ArcSegmentCreator.java  |  1 +
 .../org/apache/nutch/tools/warc/WARCExporter.java  |  1 -
 .../org/apache/nutch/util/AbstractChecker.java     |  2 ++
 .../apache/nutch/util/CrawlCompletionStats.java    |  6 ++--
 .../org/apache/nutch/util/EncodingDetector.java    |  3 ++
 .../nutch/util/GenericWritableConfigurable.java    |  2 +-
 .../apache/nutch/util/domain/DomainStatistics.java |  2 --
 .../apache/nutch/any23/TestAny23ParseFilter.java   | 13 ---------
 .../creativecommons/nutch/TestCCParseFilter.java   |  0
 .../apache/nutch/parse/feed/TestFeedParser.java    | 10 +------
 .../nutch/indexer/basic/BasicIndexingFilter.java   |  6 ----
 .../nutch/indexer/geoip/GeoIPDocumentCreator.java  |  3 +-
 .../nutch/indexer/jexl/JexlIndexingFilter.java     |  2 +-
 .../indexer/links/TestLinksIndexingFilter.java     |  1 -
 .../nutch/indexer/replace/ReplaceIndexer.java      |  2 +-
 .../cloudsearch/CloudSearchIndexWriter.java        |  1 +
 .../nutch/indexwriter/dummy/DummyIndexWriter.java  |  4 ---
 .../elasticrest/ElasticRestIndexWriter.java        |  5 ----
 .../indexwriter/elastic/ElasticIndexWriter.java    |  1 +
 .../elastic/TestElasticIndexWriter.java            |  3 ++
 .../nutch/indexwriter/rabbit/RabbitDocument.java   |  2 ++
 .../indexer/filter/MimeTypeIndexingFilter.java     |  1 +
 .../indexer/filter/MimeTypeIndexingFilterTest.java |  1 -
 .../org/apache/nutch/parse/html/HtmlParser.java    |  1 +
 .../java/org/apache/nutch/parse/swf/SWFParser.java |  4 +--
 .../parse/tika/BoilerpipeExtractorRepository.java  |  2 +-
 .../org/apache/nutch/parse/tika/TikaParser.java    |  2 +-
 .../apache/nutch/parse/tika/TestFeedParser.java    |  7 -----
 .../nutch/parsefilter/regex/RegexParseFilter.java  |  1 -
 .../parsefilter/regex/TestRegexParseFilter.java    |  2 --
 .../org/apache/nutch/protocol/file/FileError.java  |  1 +
 .../apache/nutch/protocol/file/FileResponse.java   |  4 +--
 .../java/org/apache/nutch/protocol/ftp/Ftp.java    |  1 +
 .../org/apache/nutch/protocol/ftp/FtpError.java    |  1 +
 .../org/apache/nutch/protocol/ftp/FtpResponse.java |  8 ++---
 .../nutch/protocol/htmlunit/HttpResponse.java      |  2 ++
 .../java/org/apache/nutch/protocol/http/Http.java  |  0
 .../protocol/httpclient/DummyX509TrustManager.java |  7 -----
 .../org/apache/nutch/protocol/httpclient/Http.java |  2 +-
 .../httpclient/HttpBasicAuthentication.java        |  1 +
 .../protocol/interactiveselenium/HttpResponse.java |  7 ++++-
 .../org/apache/nutch/protocol/okhttp/OkHttp.java   |  0
 .../nutch/scoring/orphan/OrphanScoringFilter.java  |  1 -
 .../nutch/scoring/similarity/cosine/Model.java     |  1 -
 .../subcollection/SubcollectionIndexingFilter.java | 11 -------
 .../urlfilter/ignoreexempt/ExemptionUrlFilter.java |  7 -----
 .../indexer/urlmeta/URLMetaIndexingFilter.java     |  6 ----
 .../scoring/urlmeta/URLMetaScoringFilter.java      |  5 ----
 .../net/urlnormalizer/ajax/AjaxURLNormalizer.java  |  1 -
 .../urlnormalizer/basic/BasicURLNormalizer.java    |  2 +-
 .../protocol/TestProtocolURLNormalizer.java        |  2 --
 .../querystring/QuerystringURLNormalizer.java      |  6 ----
 .../querystring/TestQuerystringURLNormalizer.java  |  2 --
 .../slash/TestSlashURLNormalizer.java              |  2 --
 .../nutch/crawl/CrawlDbUpdateTestDriver.java       | 18 ------------
 .../org/apache/nutch/crawl/CrawlDbUpdateUtil.java  |  1 -
 .../org/apache/nutch/crawl/TestCrawlDbMerger.java  |  1 -
 .../segment/TestSegmentMergerCrawlDatums.java      | 24 +++++++--------
 .../org/apache/nutch/service/TestNutchServer.java  |  2 +-
 .../org/apache/nutch/util/WritableTestUtils.java   |  2 +-
 95 files changed, 143 insertions(+), 252 deletions(-)
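
For context, the two recurring patterns applied throughout this commit are illustrated in the standalone Java sketch below. It is not part of the patch, and the class and method names are illustrative only: boxed-primitive constructors such as new Integer(...) and new Float(...) are deprecated since Java 9 in favour of the valueOf(...) factory methods, and Class.newInstance() is deprecated since Java 9 in favour of clazz.getConstructor().newInstance(), which reports constructor exceptions through a checked InvocationTargetException rather than propagating them undeclared.

import java.lang.reflect.InvocationTargetException;

/** Standalone sketch of the deprecation fixes applied by this commit (illustrative names). */
public class DeprecationFixSketch {

  /** Deprecated boxing constructor replaced by the valueOf() factory method. */
  static Integer boxInterval(int fetchInterval) {
    // Before (deprecated since Java 9): new Integer(fetchInterval)
    return Integer.valueOf(fetchInterval);
  }

  /** Class.newInstance() replaced by getConstructor().newInstance(). */
  static <T> T instantiate(Class<T> clazz) throws ReflectiveOperationException {
    try {
      // Before (deprecated since Java 9): clazz.newInstance()
      return clazz.getConstructor().newInstance();
    } catch (InvocationTargetException e) {
      // The replacement wraps constructor exceptions; unwrap to keep behaviour
      // close to the old newInstance() call.
      throw new ReflectiveOperationException(e.getCause());
    }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(boxInterval(30));                  // prints 30
    System.out.println(instantiate(StringBuilder.class)); // prints an empty line
  }
}

The same substitutions appear in, for example, the CrawlDatum, FetchScheduleFactory and SegmentReader hunks below.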

diff --git a/src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java b/src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/AdaptiveFetchSchedule.java b/src/java/org/apache/nutch/crawl/AdaptiveFetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/CrawlDatum.java b/src/java/org/apache/nutch/crawl/CrawlDatum.java
index b50d9c9..b57fc0b 100644
--- a/src/java/org/apache/nutch/crawl/CrawlDatum.java
+++ b/src/java/org/apache/nutch/crawl/CrawlDatum.java
@@ -545,7 +545,7 @@ public class CrawlDatum implements WritableComparable<CrawlDatum>, Cloneable {
       jcontext.set("fetchTime", (long)(getFetchTime()));
       jcontext.set("modifiedTime", (long)(getModifiedTime()));
       jcontext.set("retries", getRetriesSinceFetch());
-      jcontext.set("interval", new Integer(getFetchInterval()));
+      jcontext.set("interval", Integer.valueOf(getFetchInterval()));
       jcontext.set("score", getScore());
       jcontext.set("signature", StringUtil.toHexString(getSignature()));
             
diff --git a/src/java/org/apache/nutch/crawl/CrawlDbMerger.java b/src/java/org/apache/nutch/crawl/CrawlDbMerger.java
index 25562a6..97730a3 100644
--- a/src/java/org/apache/nutch/crawl/CrawlDbMerger.java
+++ b/src/java/org/apache/nutch/crawl/CrawlDbMerger.java
@@ -43,7 +43,6 @@ import org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.nutch.util.LockUtil;
 import org.apache.nutch.util.NutchConfiguration;
 import org.apache.nutch.util.NutchJob;
 import org.apache.nutch.util.TimingUtil;
diff --git a/src/java/org/apache/nutch/crawl/CrawlDbReader.java b/src/java/org/apache/nutch/crawl/CrawlDbReader.java
index ea4c964..7c4eb1c 100644
--- a/src/java/org/apache/nutch/crawl/CrawlDbReader.java
+++ b/src/java/org/apache/nutch/crawl/CrawlDbReader.java
@@ -28,7 +28,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -43,7 +42,6 @@ import org.slf4j.LoggerFactory;
 import com.tdunning.math.stats.MergingDigest;
 import com.tdunning.math.stats.TDigest;
 
-import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -64,10 +62,8 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
-import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.nutch.util.AbstractChecker;
diff --git a/src/java/org/apache/nutch/crawl/DefaultFetchSchedule.java b/src/java/org/apache/nutch/crawl/DefaultFetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/FetchSchedule.java b/src/java/org/apache/nutch/crawl/FetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java b/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
old mode 100755
new mode 100644
index 09a2fd8..e437800
--- a/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
+++ b/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
@@ -43,7 +43,7 @@ public class FetchScheduleFactory {
       try {
         LOG.info("Using FetchSchedule impl: " + clazz);
         Class<?> implClass = Class.forName(clazz);
-        impl = (FetchSchedule) implClass.newInstance();
+        impl = (FetchSchedule) implClass.getConstructor().newInstance();
         impl.setConf(conf);
         objectCache.setObject(clazz, impl);
       } catch (Exception e) {
diff --git a/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java b/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java
index c6c9ce9..92a4ab9 100644
--- a/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java
+++ b/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java
@@ -154,7 +154,7 @@ public class MimeAdaptiveFetchSchedule extends AdaptiveFetchSchedule {
         if (splits.length == 3) {
           // Add a lower cased MIME-type and the factor to the map
           mimeMap.put(StringUtils.lowerCase(splits[0]), new AdaptiveRate(
-              new Float(splits[1]), new Float(splits[2])));
+              Float.valueOf(splits[1]), Float.valueOf(splits[2])));
         } else {
           LOG.warn("Invalid configuration line in: " + line);
         }
diff --git a/src/java/org/apache/nutch/crawl/SignatureFactory.java b/src/java/org/apache/nutch/crawl/SignatureFactory.java
index 6832ffc..82e6709 100644
--- a/src/java/org/apache/nutch/crawl/SignatureFactory.java
+++ b/src/java/org/apache/nutch/crawl/SignatureFactory.java
@@ -51,7 +51,7 @@ public class SignatureFactory {
           LOG.info("Using Signature impl: " + clazz);
         }
         Class<?> implClass = Class.forName(clazz);
-        impl = (Signature) implClass.newInstance();
+        impl = (Signature) implClass.getConstructor().newInstance();
         impl.setConf(conf);
         objectCache.setObject(clazz, impl);
       } catch (Exception e) {
diff --git a/src/java/org/apache/nutch/fetcher/Fetcher.java b/src/java/org/apache/nutch/fetcher/Fetcher.java
index c7230ac..f6584c5 100644
--- a/src/java/org/apache/nutch/fetcher/Fetcher.java
+++ b/src/java/org/apache/nutch/fetcher/Fetcher.java
@@ -160,7 +160,7 @@ public class Fetcher extends NutchTool implements Tool {
     private void reportStatus(Context context, FetchItemQueues fetchQueues, int pagesLastSec, int bytesLastSec)
         throws IOException {
       StringBuilder status = new StringBuilder();
-      Long elapsed = new Long((System.currentTimeMillis() - start) / 1000);
+      Long elapsed = Long.valueOf((System.currentTimeMillis() - start) / 1000);
 
       float avgPagesSec = (float) pages.get() / elapsed.floatValue();
       long avgBytesSec = (bytes.get() / 128l) / elapsed.longValue();
diff --git a/src/java/org/apache/nutch/hostdb/ReadHostDb.java b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
index 360b119..a17a90b 100644
--- a/src/java/org/apache/nutch/hostdb/ReadHostDb.java
+++ b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
@@ -224,8 +224,8 @@ public class ReadHostDb extends Configured implements Tool {
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
       
-    Text key = (Text) keyClass.newInstance();
-    HostDatum value = (HostDatum) valueClass.newInstance();
+    Text key = (Text) keyClass.getConstructor().newInstance();
+    HostDatum value = (HostDatum) valueClass.getConstructor().newInstance();
     
     for (int i = 0; i < readers.length; i++) {
       while (readers[i].next(key, value)) {
diff --git a/src/java/org/apache/nutch/hostdb/ResolverThread.java b/src/java/org/apache/nutch/hostdb/ResolverThread.java
index 53517b8..fe66217 100644
--- a/src/java/org/apache/nutch/hostdb/ResolverThread.java
+++ b/src/java/org/apache/nutch/hostdb/ResolverThread.java
@@ -61,6 +61,7 @@ public class ResolverThread implements Runnable {
     // Resolve the host and act appropriatly
     try {
       // Throws an exception if host is not found
+      @SuppressWarnings("unused")
       InetAddress inetAddr = InetAddress.getByName(host);
 
       if (datum.isEmpty()) {
diff --git a/src/java/org/apache/nutch/indexer/CleaningJob.java b/src/java/org/apache/nutch/indexer/CleaningJob.java
index 7a0f70e..8a77a9d 100644
--- a/src/java/org/apache/nutch/indexer/CleaningJob.java
+++ b/src/java/org/apache/nutch/indexer/CleaningJob.java
@@ -77,7 +77,9 @@ public class CleaningJob implements Tool {
 
   public static class DeleterReducer extends
       Reducer<ByteWritable, Text, Text, ByteWritable> {
+    @SuppressWarnings("unused")
     private static final int NUM_MAX_DELETE_REQUEST = 1000;
+    @SuppressWarnings("unused")
     private int numDeletes = 0;
     private int totalDeleted = 0;
 
diff --git a/src/java/org/apache/nutch/indexer/IndexWriter.java b/src/java/org/apache/nutch/indexer/IndexWriter.java
index 4413699..b33c507 100644
--- a/src/java/org/apache/nutch/indexer/IndexWriter.java
+++ b/src/java/org/apache/nutch/indexer/IndexWriter.java
@@ -28,6 +28,9 @@ public interface IndexWriter extends Pluggable, Configurable {
    */
   final static String X_POINT_ID = IndexWriter.class.getName();
 
+  /**
+   * @deprecated use {@link #open(IndexWriterParams)} instead.
+   */
   @Deprecated
   public void open(Configuration conf, String name) throws IOException;
 
diff --git a/src/java/org/apache/nutch/indexer/IndexingFilters.java b/src/java/org/apache/nutch/indexer/IndexingFilters.java
index ca603d4..5ebdd7f 100644
--- a/src/java/org/apache/nutch/indexer/IndexingFilters.java
+++ b/src/java/org/apache/nutch/indexer/IndexingFilters.java
@@ -17,9 +17,6 @@
 
 package org.apache.nutch.indexer;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.nutch.plugin.PluginRepository;
 import org.apache.nutch.parse.Parse;
 import org.apache.hadoop.conf.Configuration;
@@ -27,16 +24,11 @@ import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.crawl.Inlinks;
 import org.apache.hadoop.io.Text;
 
-import java.lang.invoke.MethodHandles;
-
 /** Creates and caches {@link IndexingFilter} implementing plugins. */
 public class IndexingFilters {
 
   public static final String INDEXINGFILTER_ORDER = "indexingfilter.order";
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   private IndexingFilter[] indexingFilters;
 
   public IndexingFilters(Configuration conf) {
diff --git a/src/java/org/apache/nutch/plugin/Extension.java b/src/java/org/apache/nutch/plugin/Extension.java
index 7c074de..e73b850 100644
--- a/src/java/org/apache/nutch/plugin/Extension.java
+++ b/src/java/org/apache/nutch/plugin/Extension.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nutch.plugin;
 
+import java.lang.reflect.InvocationTargetException;
 import java.util.HashMap;
 
 import org.apache.hadoop.conf.Configuration;
@@ -158,8 +159,13 @@ public class Extension {
         // lazy loading of Plugin in case there is no instance of the plugin
         // already.
         pluginRepository.getPluginInstance(getDescriptor());
-        Object object = extensionClazz.newInstance();
-        if (object instanceof Configurable) {
+        Object object = null;
+        try {
+          object = extensionClazz.getConstructor().newInstance();
+        } catch (IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
+          e.printStackTrace();
+        }
+        if (object != null && object instanceof Configurable) {
           ((Configurable) object).setConf(this.conf);
         }
         return object;
diff --git a/src/java/org/apache/nutch/plugin/Plugin.java b/src/java/org/apache/nutch/plugin/Plugin.java
index e78754b..8325a56 100644
--- a/src/java/org/apache/nutch/plugin/Plugin.java
+++ b/src/java/org/apache/nutch/plugin/Plugin.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration;
  * instances are used as the point of life cycle managemet of plugin related
  * functionality.
  * 
- * The <code>Plugin</code> will be startuped and shutdown by the nutch plugin
+ * The <code>Plugin</code> will be started up and shutdown by the nutch plugin
  * management system.
  * 
  * A possible usecase of the <code>Plugin</code> implementation is to create or
@@ -88,6 +88,7 @@ public class Plugin {
     fDescriptor = descriptor;
   }
 
+  @SuppressWarnings("deprecation")
   protected void finalize() throws Throwable {
     super.finalize();
     shutDown();
diff --git a/src/java/org/apache/nutch/protocol/Content.java b/src/java/org/apache/nutch/protocol/Content.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/protocol/Protocol.java b/src/java/org/apache/nutch/protocol/Protocol.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/protocol/ProtocolException.java b/src/java/org/apache/nutch/protocol/ProtocolException.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/protocol/ProtocolFactory.java b/src/java/org/apache/nutch/protocol/ProtocolFactory.java
index b39155b..87944a8 100644
--- a/src/java/org/apache/nutch/protocol/ProtocolFactory.java
+++ b/src/java/org/apache/nutch/protocol/ProtocolFactory.java
@@ -17,12 +17,9 @@
 
 package org.apache.nutch.protocol;
 
-import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.net.MalformedURLException;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.nutch.plugin.Extension;
 import org.apache.nutch.plugin.ExtensionPoint;
 import org.apache.nutch.plugin.PluginRepository;
@@ -40,9 +37,6 @@ import org.apache.hadoop.conf.Configuration;
  */
 public class ProtocolFactory {
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   private ExtensionPoint extensionPoint;
 
   private Configuration conf;
diff --git a/src/java/org/apache/nutch/protocol/ProtocolStatus.java b/src/java/org/apache/nutch/protocol/ProtocolStatus.java
index 2791406..46f9730 100644
--- a/src/java/org/apache/nutch/protocol/ProtocolStatus.java
+++ b/src/java/org/apache/nutch/protocol/ProtocolStatus.java
@@ -101,22 +101,22 @@ public class ProtocolStatus implements Writable {
 
   private static final HashMap<Integer, String> codeToName = new HashMap<>();
   static {
-    codeToName.put(new Integer(SUCCESS), "success");
-    codeToName.put(new Integer(FAILED), "failed");
-    codeToName.put(new Integer(PROTO_NOT_FOUND), "proto_not_found");
-    codeToName.put(new Integer(GONE), "gone");
-    codeToName.put(new Integer(MOVED), "moved");
-    codeToName.put(new Integer(TEMP_MOVED), "temp_moved");
-    codeToName.put(new Integer(NOTFOUND), "notfound");
-    codeToName.put(new Integer(RETRY), "retry");
-    codeToName.put(new Integer(EXCEPTION), "exception");
-    codeToName.put(new Integer(ACCESS_DENIED), "access_denied");
-    codeToName.put(new Integer(ROBOTS_DENIED), "robots_denied");
-    codeToName.put(new Integer(REDIR_EXCEEDED), "redir_exceeded");
-    codeToName.put(new Integer(NOTFETCHING), "notfetching");
-    codeToName.put(new Integer(NOTMODIFIED), "notmodified");
-    codeToName.put(new Integer(WOULDBLOCK), "wouldblock");
-    codeToName.put(new Integer(BLOCKED), "blocked");
+    codeToName.put(Integer.valueOf(SUCCESS), "success");
+    codeToName.put(Integer.valueOf(FAILED), "failed");
+    codeToName.put(Integer.valueOf(PROTO_NOT_FOUND), "proto_not_found");
+    codeToName.put(Integer.valueOf(GONE), "gone");
+    codeToName.put(Integer.valueOf(MOVED), "moved");
+    codeToName.put(Integer.valueOf(TEMP_MOVED), "temp_moved");
+    codeToName.put(Integer.valueOf(NOTFOUND), "notfound");
+    codeToName.put(Integer.valueOf(RETRY), "retry");
+    codeToName.put(Integer.valueOf(EXCEPTION), "exception");
+    codeToName.put(Integer.valueOf(ACCESS_DENIED), "access_denied");
+    codeToName.put(Integer.valueOf(ROBOTS_DENIED), "robots_denied");
+    codeToName.put(Integer.valueOf(REDIR_EXCEEDED), "redir_exceeded");
+    codeToName.put(Integer.valueOf(NOTFETCHING), "notfetching");
+    codeToName.put(Integer.valueOf(NOTMODIFIED), "notmodified");
+    codeToName.put(Integer.valueOf(WOULDBLOCK), "wouldblock");
+    codeToName.put(Integer.valueOf(BLOCKED), "blocked");
   }
 
   public ProtocolStatus() {
@@ -280,7 +280,7 @@ public class ProtocolStatus implements Writable {
 
   public String toString() {
     StringBuffer res = new StringBuffer();
-    res.append(codeToName.get(new Integer(code)) + "(" + code
+    res.append(codeToName.get(Integer.valueOf(code)) + "(" + code
         + "), lastModified=" + lastModified);
     if (args != null) {
       if (args.length == 1) {
diff --git a/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java b/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java
index 6d751c0..6d75cde 100644
--- a/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java
+++ b/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java
@@ -69,6 +69,7 @@ public class ContentAsTextInputFormat extends
 
     }
 
+    @SuppressWarnings("unused")
     public synchronized boolean next(Text key, Text value) 
         throws IOException, InterruptedException {
 
diff --git a/src/java/org/apache/nutch/segment/SegmentReader.java b/src/java/org/apache/nutch/segment/SegmentReader.java
index 2b99435..c09c7ca 100644
--- a/src/java/org/apache/nutch/segment/SegmentReader.java
+++ b/src/java/org/apache/nutch/segment/SegmentReader.java
@@ -421,16 +421,16 @@ public class SegmentReader extends Configured implements Tool {
     Class<?> valueClass = readers[0].getValueClass();
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
-    Writable value = (Writable) valueClass.newInstance();
+    Writable value = (Writable) valueClass.getConstructor().newInstance();
     // we don't know the partitioning schema
     for (int i = 0; i < readers.length; i++) {
       if (readers[i].get(key, value) != null) {
         res.add(value);
-        value = (Writable) valueClass.newInstance();
-        Text aKey = (Text) keyClass.newInstance();
+        value = (Writable) valueClass.getConstructor().newInstance();
+        Text aKey = (Text) keyClass.getConstructor().newInstance();
         while (readers[i].next(aKey, value) && aKey.equals(key)) {
           res.add(value);
-          value = (Writable) valueClass.newInstance();
+          value = (Writable) valueClass.getConstructor().newInstance();
         }
       }
       readers[i].close();
@@ -446,13 +446,13 @@ public class SegmentReader extends Configured implements Tool {
     Class<?> valueClass = readers[0].getValueClass();
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
-    WritableComparable<?> aKey = (WritableComparable<?>) keyClass.newInstance();
-    Writable value = (Writable) valueClass.newInstance();
+    WritableComparable<?> aKey = (WritableComparable<?>) keyClass.getConstructor().newInstance();
+    Writable value = (Writable) valueClass.getConstructor().newInstance();
     for (int i = 0; i < readers.length; i++) {
       while (readers[i].next(aKey, value)) {
         if (aKey.equals(key)) {
           res.add(value);
-          value = (Writable) valueClass.newInstance();
+          value = (Writable) valueClass.getConstructor().newInstance();
         }
       }
       readers[i].close();
diff --git a/src/java/org/apache/nutch/service/impl/LinkReader.java b/src/java/org/apache/nutch/service/impl/LinkReader.java
index 39e1106..9d2ffcb 100644
--- a/src/java/org/apache/nutch/service/impl/LinkReader.java
+++ b/src/java/org/apache/nutch/service/impl/LinkReader.java
@@ -60,7 +60,6 @@ public class LinkReader implements NutchReader{
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
      LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -93,7 +92,6 @@ public class LinkReader implements NutchReader{
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
      LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -128,7 +126,6 @@ public class LinkReader implements NutchReader{
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
      LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -154,7 +151,6 @@ public class LinkReader implements NutchReader{
     } catch(FileNotFoundException fne){ 
       throw new FileNotFoundException();
     }catch (IOException e) {
-      // TODO Auto-generated catch block
      LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
     } 
@@ -162,14 +158,14 @@ public class LinkReader implements NutchReader{
   }
 
   private HashMap<String, String> getLinksRow(Writable key, LinkDatum value) {
-    HashMap<String, String> t_row = new HashMap<>();
-    t_row.put("key_url", key.toString());
-    t_row.put("url", value.getUrl());
-    t_row.put("anchor", value.getAnchor());
-    t_row.put("score", String.valueOf(value.getScore()));
-    t_row.put("timestamp", String.valueOf(value.getTimestamp()));
-    t_row.put("linktype", String.valueOf(value.getLinkType()));
-
-    return t_row;
+    HashMap<String, String> tRow = new HashMap<>();
+    tRow.put("key_url", key.toString());
+    tRow.put("url", value.getUrl());
+    tRow.put("anchor", value.getAnchor());
+    tRow.put("score", String.valueOf(value.getScore()));
+    tRow.put("timestamp", String.valueOf(value.getTimestamp()));
+    tRow.put("linktype", String.valueOf(value.getLinkType()));
+
+    return tRow;
   }
 }
diff --git a/src/java/org/apache/nutch/service/impl/NodeReader.java b/src/java/org/apache/nutch/service/impl/NodeReader.java
index 28d6600..e52f6a6 100644
--- a/src/java/org/apache/nutch/service/impl/NodeReader.java
+++ b/src/java/org/apache/nutch/service/impl/NodeReader.java
@@ -60,7 +60,6 @@ public class NodeReader implements NutchReader {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
      LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -93,7 +92,6 @@ public class NodeReader implements NutchReader {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file, 
           StringUtils.stringifyException(e));
@@ -129,7 +127,6 @@ public class NodeReader implements NutchReader {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file, 
           StringUtils.stringifyException(e));
@@ -159,7 +156,6 @@ public class NodeReader implements NutchReader {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file, 
           StringUtils.stringifyException(e));
@@ -170,14 +166,14 @@ public class NodeReader implements NutchReader {
   }
 
   private HashMap<String, String> getNodeRow(Writable key, Node value) {
-    HashMap<String, String> t_row = new HashMap<>();
-    t_row.put("key_url", key.toString());
-    t_row.put("num_inlinks", String.valueOf(value.getNumInlinks()) );
-    t_row.put("num_outlinks", String.valueOf(value.getNumOutlinks()) );
-    t_row.put("inlink_score", String.valueOf(value.getInlinkScore()));
-    t_row.put("outlink_score", String.valueOf(value.getOutlinkScore()));
-    t_row.put("metadata", value.getMetadata().toString());
-
-    return t_row;
+    HashMap<String, String> tRow = new HashMap<>();
+    tRow.put("key_url", key.toString());
+    tRow.put("num_inlinks", String.valueOf(value.getNumInlinks()) );
+    tRow.put("num_outlinks", String.valueOf(value.getNumOutlinks()) );
+    tRow.put("inlink_score", String.valueOf(value.getInlinkScore()));
+    tRow.put("outlink_score", String.valueOf(value.getOutlinkScore()));
+    tRow.put("metadata", value.getMetadata().toString());
+
+    return tRow;
   }
 }
diff --git a/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java b/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java
index 147b61a..b3bcb2e 100644
--- a/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java
+++ b/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java
@@ -48,6 +48,7 @@ public class NutchServerPoolExecutor extends ThreadPoolExecutor{
       runningWorkers.offer(((JobWorker) runnable));
     }
   }
+  @SuppressWarnings("unlikely-arg-type")
   @Override
   protected void afterExecute(Runnable runnable, Throwable throwable) {
     super.afterExecute(runnable, throwable);
diff --git a/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java b/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java
index 6c76a7d..bac0924 100644
--- a/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java
+++ b/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java
@@ -76,15 +76,19 @@ public class FetchNodeDbInfo {
       this.anchorText = anchorText;
     }
     
+    @SuppressWarnings("unused")
     public String getAnchorText() {
       return anchorText;
     }
+    @SuppressWarnings("unused")
     public void setAnchorText(String anchorText) {
       this.anchorText = anchorText;
     }
+    @SuppressWarnings("unused")
     public String getChildUrl() {
       return childUrl;
     }
+    @SuppressWarnings("unused")
     public void setChildUrl(String childUrl) {
       this.childUrl = childUrl;
     }
diff --git a/src/java/org/apache/nutch/service/resources/DbResource.java b/src/java/org/apache/nutch/service/resources/DbResource.java
index aeeb27b..67771d4 100644
--- a/src/java/org/apache/nutch/service/resources/DbResource.java
+++ b/src/java/org/apache/nutch/service/resources/DbResource.java
@@ -111,6 +111,7 @@ public class DbResource extends AbstractResource {
 
   @Produces(MediaType.APPLICATION_OCTET_STREAM)
   private Response crawlDbDump(Configuration conf, Map<String, String> args, String crawlId){
+    @SuppressWarnings("resource")
     CrawlDbReader dbr = new CrawlDbReader();
     try{
      return Response.ok(dbr.query(args, conf, "dump", crawlId), MediaType.APPLICATION_OCTET_STREAM).build();
@@ -122,6 +123,7 @@ public class DbResource extends AbstractResource {
 
   @Produces(MediaType.APPLICATION_OCTET_STREAM)
   private Response crawlDbTopN(Configuration conf, Map<String, String> args, String crawlId) {
+    @SuppressWarnings("resource")
     CrawlDbReader dbr = new CrawlDbReader();
     try{
      return Response.ok(dbr.query(args, conf, "topN", crawlId), MediaType.APPLICATION_OCTET_STREAM).build();
@@ -132,6 +134,7 @@ public class DbResource extends AbstractResource {
   }
 
   private Response crawlDbUrl(Configuration conf, Map<String, String> args, String crawlId){
+    @SuppressWarnings("resource")
     CrawlDbReader dbr = new CrawlDbReader();
     try{
       return Response.ok(dbr.query(args, conf, "url", crawlId)).build();
diff --git a/src/java/org/apache/nutch/tools/Benchmark.java b/src/java/org/apache/nutch/tools/Benchmark.java
old mode 100755
new mode 100644
index 0c82fc3..c8b4a94
--- a/src/java/org/apache/nutch/tools/Benchmark.java
+++ b/src/java/org/apache/nutch/tools/Benchmark.java
@@ -195,6 +195,7 @@ public class Benchmark extends Configured implements Tool {
     conf.setInt(Generator.GENERATOR_MAX_COUNT, maxPerHost);
     conf.set(Generator.GENERATOR_COUNT_MODE,
         Generator.GENERATOR_COUNT_VALUE_HOST);
+    @SuppressWarnings("unused")
     Job job = NutchJob.getInstance(getConf());
     FileSystem fs = FileSystem.get(conf);
     Path dir = new Path(getConf().get("hadoop.tmp.dir"), "bench-"
@@ -276,6 +277,7 @@ public class Benchmark extends Configured implements Tool {
       LOG.info("crawl finished: " + dir);
     }
     res.elapsed = System.currentTimeMillis() - res.elapsed;
+    @SuppressWarnings("resource")
     CrawlDbReader dbreader = new CrawlDbReader();
     dbreader.processStatJob(crawlDb.toString(), conf, false);
     return res;
diff --git a/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java b/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java
index 80adc05..c013059 100644
--- a/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java
+++ b/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java
@@ -283,7 +283,7 @@ public class CommonCrawlDataDumper extends NutchTool implements Tool {
         SequenceFile.Reader reader = new SequenceFile.Reader(nutchConfig,
             SequenceFile.Reader.file(segmentPart));
 
-        Writable key = (Writable) reader.getKeyClass().newInstance();
+        Writable key = (Writable) reader.getKeyClass().getConstructor().newInstance();
 
         Content content = null;
         while (reader.next(key)) {
diff --git a/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java b/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java
index b793361..6f89b16 100644
--- a/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java
+++ b/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java
@@ -160,8 +160,6 @@ public class CommonCrawlFormatWARC extends AbstractCommonCrawlFormat {
     record.setType(WARCConstants.WARCRecordType.response);
     record.setUrl(getUrl());
 
-    String fetchTime;
-
     record.setCreate14DigitDate(DateUtils
         .getLog14Date(Long.parseLong(metadata.get("nutch.fetch.time"))));
     record.setMimetype(WARCConstants.HTTP_RESPONSE_MIMETYPE);
diff --git a/src/java/org/apache/nutch/tools/DmozParser.java b/src/java/org/apache/nutch/tools/DmozParser.java
index 217a15e..fa7e7d6 100644
--- a/src/java/org/apache/nutch/tools/DmozParser.java
+++ b/src/java/org/apache/nutch/tools/DmozParser.java
@@ -113,10 +113,10 @@ public class DmozParser {
    */
   private class RDFProcessor extends DefaultHandler {
     String curURL = null, curSection = null;
-    boolean titlePending = false, descPending = false,
-        insideAdultSection = false;
+    boolean titlePending = false, descPending = false;
     Pattern topicPattern = null;
     StringBuffer title = new StringBuffer(), desc = new StringBuffer();
+    @SuppressWarnings("unused")
     XMLReader reader;
     int subsetDenom;
     int hashSkew;
@@ -259,17 +259,6 @@ public class DmozParser {
     }
 
     /**
-     * Emit the exception message, with line numbers
-     */
-    public void errorError(SAXParseException spe) {
-      if (LOG.isErrorEnabled()) {
-        LOG.error("Fatal err: " + spe.toString() + ": " + spe.getMessage());
-        LOG.error("Last known line is " + location.getLineNumber()
-            + ", column " + location.getColumnNumber());
-      }
-    }
-
-    /**
      * Emit exception warning message
      */
     public void warning(SAXParseException spe) {
diff --git a/src/java/org/apache/nutch/tools/FileDumper.java b/src/java/org/apache/nutch/tools/FileDumper.java
index fcf2f19..d09ad74 100644
--- a/src/java/org/apache/nutch/tools/FileDumper.java
+++ b/src/java/org/apache/nutch/tools/FileDumper.java
@@ -172,7 +172,7 @@ public class FileDumper {
 
          SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file));
 
-          Writable key = (Writable) reader.getKeyClass().newInstance();
+          Writable key = (Writable) reader.getKeyClass().getConstructor().newInstance();
           Content content = null;
 
           while (reader.next(key)) {
diff --git a/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java b/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java
index 499b246..7685e5f 100644
--- a/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java
+++ b/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java
@@ -344,6 +344,7 @@ public class ArcSegmentCreator extends Configured implements Tool {
 
           // set the url version into the metadata
           content.getMetadata().set(URL_VERSION, version);
+          @SuppressWarnings("unused")
           ParseStatus pstatus = null;
           pstatus = output(context, segmentName, url, datum, content, status,
               CrawlDatum.STATUS_FETCH_SUCCESS);
diff --git a/src/java/org/apache/nutch/tools/warc/WARCExporter.java b/src/java/org/apache/nutch/tools/warc/WARCExporter.java
index ee9879a..a7e08c7 100644
--- a/src/java/org/apache/nutch/tools/warc/WARCExporter.java
+++ b/src/java/org/apache/nutch/tools/warc/WARCExporter.java
@@ -259,7 +259,6 @@ public class WARCExporter extends Configured implements Tool {
 
     final Job job = NutchJob.getInstance(getConf());
     job.setJobName("warc-exporter " + output);
-    Configuration conf = job.getConfiguration();
 
     for (final Path segment : segments) {
       LOG.info("warc-exporter: adding segment: {}", segment);
diff --git a/src/java/org/apache/nutch/util/AbstractChecker.java b/src/java/org/apache/nutch/util/AbstractChecker.java
index 8d365ec..e0af36d 100644
--- a/src/java/org/apache/nutch/util/AbstractChecker.java
+++ b/src/java/org/apache/nutch/util/AbstractChecker.java
@@ -94,6 +94,7 @@ public abstract class AbstractChecker extends Configured implements Tool {
     String line;
     while ((line = in.readLine()) != null) {
       StringBuilder output = new StringBuilder();
+      @SuppressWarnings("unused")
       int ret = process(line, output);
       System.out.println(output);
     }
@@ -101,6 +102,7 @@ public abstract class AbstractChecker extends Configured implements Tool {
   }
 
   // Open TCP socket and process input
+  @SuppressWarnings("resource")
   protected void processTCP(int tcpPort) throws Exception {
     ServerSocket server = null;
 
diff --git a/src/java/org/apache/nutch/util/CrawlCompletionStats.java b/src/java/org/apache/nutch/util/CrawlCompletionStats.java
index 116c311..4208b5c 100644
--- a/src/java/org/apache/nutch/util/CrawlCompletionStats.java
+++ b/src/java/org/apache/nutch/util/CrawlCompletionStats.java
@@ -68,28 +68,30 @@ public class CrawlCompletionStats extends Configured implements Tool {
   private static final int MODE_HOST = 1;
   private static final int MODE_DOMAIN = 2;
 
-  private int mode = 0;
-
   public int run(String[] args) throws Exception {
     Option helpOpt = new Option("h", "help", false, "Show this message");
+    @SuppressWarnings("static-access")
     Option inDirs = OptionBuilder
         .withArgName("inputDirs")
         .isRequired()
         .withDescription("Comma separated list of crawl directories (e.g., \"./crawl1,./crawl2\")")
         .hasArgs()
         .create("inputDirs");
+    @SuppressWarnings("static-access")
     Option outDir = OptionBuilder
         .withArgName("outputDir")
         .isRequired()
         .withDescription("Output directory where results should be dumped")
         .hasArgs()
         .create("outputDir");
+    @SuppressWarnings("static-access")
     Option modeOpt = OptionBuilder
         .withArgName("mode")
         .isRequired()
         .withDescription("Set statistics gathering mode (by 'host' or by 'domain')")
         .hasArgs()
         .create("mode");
+    @SuppressWarnings("static-access")
     Option numReducers = OptionBuilder
         .withArgName("numReducers")
         .withDescription("Optional number of reduce jobs to use. Defaults to 1")
diff --git a/src/java/org/apache/nutch/util/EncodingDetector.java b/src/java/org/apache/nutch/util/EncodingDetector.java
index ba36205..01e65e5 100644
--- a/src/java/org/apache/nutch/util/EncodingDetector.java
+++ b/src/java/org/apache/nutch/util/EncodingDetector.java
@@ -79,10 +79,12 @@ public class EncodingDetector {
       this.confidence = confidence;
     }
 
+    @SuppressWarnings("unused")
     public String getSource() {
       return source;
     }
 
+    @SuppressWarnings("unused")
     public String getValue() {
       return value;
     }
@@ -354,6 +356,7 @@ public class EncodingDetector {
         NutchConfiguration.create());
 
     // do everything as bytes; don't want any conversion
+    @SuppressWarnings("resource")
     BufferedInputStream istr = new BufferedInputStream(new FileInputStream(
         args[0]));
     ByteArrayOutputStream ostr = new ByteArrayOutputStream();
diff --git a/src/java/org/apache/nutch/util/GenericWritableConfigurable.java b/src/java/org/apache/nutch/util/GenericWritableConfigurable.java
index 755aad0..086ca9b 100644
--- a/src/java/org/apache/nutch/util/GenericWritableConfigurable.java
+++ b/src/java/org/apache/nutch/util/GenericWritableConfigurable.java
@@ -46,7 +46,7 @@ public abstract class GenericWritableConfigurable extends GenericWritable
     byte type = in.readByte();
     Class<?> clazz = getTypes()[type];
     try {
-      set((Writable) clazz.newInstance());
+      set((Writable) clazz.getConstructor().newInstance());
     } catch (Exception e) {
       e.printStackTrace();
       throw new IOException("Cannot initialize the class: " + clazz);
diff --git a/src/java/org/apache/nutch/util/domain/DomainStatistics.java b/src/java/org/apache/nutch/util/domain/DomainStatistics.java
index 1eec59e..32ba10f 100644
--- a/src/java/org/apache/nutch/util/domain/DomainStatistics.java
+++ b/src/java/org/apache/nutch/util/domain/DomainStatistics.java
@@ -63,8 +63,6 @@ public class DomainStatistics extends Configured implements Tool {
   private static final int MODE_SUFFIX = 3;
   private static final int MODE_TLD = 4;
 
-  private int mode = 0;
-
   public int run(String[] args) throws Exception {
     if (args.length < 3) {
      System.err.println("Usage: DomainStatistics inputDirs outDir mode [numOfReducer]");
diff --git a/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java b/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java
index 4271730..251dfaf 100644
--- a/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java
+++ b/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java
@@ -16,31 +16,18 @@
  */
 package org.apache.nutch.any23;
 
-import java.io.DataInputStream;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import org.apache.avro.util.Utf8;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
-import org.apache.nutch.indexer.NutchDocument;
-import org.apache.nutch.metadata.Metadata;
-import org.apache.nutch.parse.Outlink;
 import org.apache.nutch.parse.Parse;
-import org.apache.nutch.parse.ParseData;
 import org.apache.nutch.parse.ParseException;
-import org.apache.nutch.parse.ParseImpl;
-import org.apache.nutch.parse.ParseStatus;
 import org.apache.nutch.parse.ParseUtil;
 import org.apache.nutch.parse.ParserNotFound;
 import org.apache.nutch.protocol.Content;
 import org.apache.nutch.protocol.Protocol;
 import org.apache.nutch.protocol.ProtocolFactory;
-import org.apache.nutch.util.MimeUtil;
 import org.apache.nutch.util.NutchConfiguration;
 import org.junit.Assert;
 import org.junit.Before;
diff --git a/src/plugin/creativecommons/src/test/org/creativecommons/nutch/TestCCParseFilter.java b/src/plugin/creativecommons/src/test/org/creativecommons/nutch/TestCCParseFilter.java
old mode 100755
new mode 100644
diff --git a/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java b/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java
index f5d4807..9243009 100644
--- a/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java
+++ b/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java
@@ -17,16 +17,12 @@
 
 package org.apache.nutch.parse.feed;
 
-// JDK imports
-import java.lang.invoke.MethodHandles;
 import java.util.Iterator;
 import java.util.Map;
 
 import org.junit.Assert;
 import org.junit.Test;
-// APACHE imports
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -58,10 +54,6 @@ public class TestFeedParser {
   // ./src/plugin/feed/build.xml during plugin compilation.
 
   private String[] sampleFiles = { "rsstest.rss" };
-
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   /**
   * Calls the {@link FeedParser} on a sample RSS file and checks that there are
    * 3 {@link ParseResult} entries including the below 2 links:
diff --git a/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java b/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java
index 5e1233b..0e62fea 100644
--- a/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java
+++ b/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java
@@ -17,9 +17,6 @@
 
 package org.apache.nutch.indexer.basic;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.nutch.metadata.Nutch;
 import org.apache.nutch.parse.Parse;
 
@@ -33,7 +30,6 @@ import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.crawl.Inlinks;
 
-import java.lang.invoke.MethodHandles;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Date;
@@ -49,8 +45,6 @@ import org.apache.hadoop.conf.Configuration;
  * {@code indexer.max.content.length} in nutch-default.xml.
  */
 public class BasicIndexingFilter implements IndexingFilter {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
 
   private int MAX_TITLE_LENGTH;
   private int MAX_CONTENT_LENGTH;
diff --git a/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java b/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java
index 88d78ef..38e75b1 100644
--- a/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java
+++ b/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java
@@ -111,8 +111,9 @@ public class GeoIPDocumentCreator {
     doc.add("isp", traits.getIsp());
     doc.add("org", traits.getOrganization());
     doc.add("userType", traits.getUserType());
+    //for better results, users should upgrade to
+    //https://www.maxmind.com/en/solutions/geoip2-enterprise-product-suite/anonymous-ip-database
     doc.add("isAnonProxy", traits.isAnonymousProxy());
-    doc.add("isSatelliteProv", traits.isSatelliteProvider());
     return doc;
   }
 
diff --git a/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java b/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java
index 24284a6..3fa2294 100644
--- a/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java
+++ b/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java
@@ -61,7 +61,7 @@ public class JexlIndexingFilter implements IndexingFilter {
     jcontext.set("fetchTime", (long) (datum.getFetchTime()));
     jcontext.set("modifiedTime", (long) (datum.getModifiedTime()));
     jcontext.set("retries", datum.getRetriesSinceFetch());
-    jcontext.set("interval", new Integer(datum.getFetchInterval()));
+    jcontext.set("interval", Integer.valueOf(datum.getFetchInterval()));
     jcontext.set("score", datum.getScore());
     jcontext.set("signature", StringUtil.toHexString(datum.getSignature()));
     jcontext.set("url", url.toString());
diff --git a/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java b/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java
index a71f8ba..0b22a98 100644
--- a/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java
+++ b/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java
@@ -36,7 +36,6 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.net.URL;
-import java.util.Iterator;
 
 public class TestLinksIndexingFilter {
 
diff --git a/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java b/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java
index cb178b2..4066ce0 100644
--- a/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java
+++ b/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java
@@ -201,7 +201,7 @@ public class ReplaceIndexer implements IndexingFilter {
                   continue;
                 }
               }
-              Integer iFlags = (flags > 0) ? new Integer(flags) : null;
+              Integer iFlags = (flags > 0) ? Integer.valueOf(flags) : null;
 
               // Make a FieldReplacer out of these params.
               FieldReplacer fr = new FieldReplacer(fieldName, toFieldName,
diff --git a/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java b/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java
index 3973485..2d72d9f 100644
--- a/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java
+++ b/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java
@@ -311,6 +311,7 @@ public class CloudSearchIndexWriter implements IndexWriter {
       batch.setContentLength((long) bb.length);
       batch.setContentType(ContentType.Applicationjson);
       batch.setDocuments(inputStream);
+      @SuppressWarnings("unused")
       UploadDocumentsResult result = client.uploadDocuments(batch);
     } catch (Exception e) {
       LOG.error("Exception while sending batch", e);
diff --git a/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java b/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java
index 064d8f6..7a40036 100644
--- a/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java
+++ b/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java
@@ -21,14 +21,10 @@ import java.io.BufferedWriter;
 import java.io.IOException;
 import java.io.FileWriter;
 import java.io.Writer;
-import java.util.Map;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.nutch.indexer.IndexWriter;
 import org.apache.nutch.indexer.IndexWriterParams;
-import org.apache.nutch.indexer.IndexerMapReduce;
 import org.apache.nutch.indexer.NutchDocument;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java b/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java
index 3bd9d41..f40f0b8 100644
--- a/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java
+++ b/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java
@@ -14,9 +14,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-//TODO refactor the dependencies out of root ivy file
-
 package org.apache.nutch.indexwriter.elasticrest;
 
 import io.searchbox.client.JestClient;
@@ -54,11 +51,9 @@ import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.LinkedHashSet;
-import java.util.List;
 import java.util.Set;
 import java.util.Date;
 import java.util.concurrent.ExecutionException;
diff --git a/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java b/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java
index 1540241..5fa2def 100644
--- a/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java
+++ b/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java
@@ -159,6 +159,7 @@ public class ElasticIndexWriter implements IndexWriter {
 
     // Prefer TransportClient
     if (hosts != null && port > 1) {
+      @SuppressWarnings("resource")
       TransportClient transportClient = new PreBuiltTransportClient(settings);
 
       for (String host : hosts)
diff --git a/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java b/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java
index dc59cd4..6fb1ab2 100644
--- a/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java
+++ b/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java
@@ -136,6 +136,7 @@ public class TestElasticIndexWriter {
   public void testBulkMaxDocs() throws IOException {
     int numDocs = 10;
     conf.setInt(ElasticConstants.MAX_BULK_DOCS, numDocs);
+    @SuppressWarnings("unused")
     Job job = Job.getInstance(conf);
 
     Map<String, String> parameters = new HashMap<>();
@@ -175,6 +176,7 @@ public class TestElasticIndexWriter {
     int numDocs = testMaxBulkLength / (key.length() + value.length());
 
     conf.setInt(ElasticConstants.MAX_BULK_LENGTH, testMaxBulkLength);
+    @SuppressWarnings("unused")
     Job job = Job.getInstance(conf);
 
     Map<String, String> parameters = new HashMap<>();
@@ -207,6 +209,7 @@ public class TestElasticIndexWriter {
     int numDocs = 10;
     conf.setInt(ElasticConstants.MAX_BULK_DOCS, numDocs);
 
+    @SuppressWarnings("unused")
     Job job = Job.getInstance(conf);
 
     Map<String, String> parameters = new HashMap<>();
diff --git a/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java b/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java
index dd0c309..1f08d42 100644
--- a/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java
+++ b/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java
@@ -24,6 +24,7 @@ import java.util.List;
 class RabbitDocument {
   private List<RabbitDocumentField> fields;
 
+  @SuppressWarnings("unused")
   private float documentBoost;
 
   RabbitDocument() {
@@ -49,6 +50,7 @@ class RabbitDocument {
 
   static class RabbitDocumentField {
     private String key;
+    @SuppressWarnings("unused")
     private float weight;
     private List<Object> values;
 
diff --git a/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java b/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java
index 99c59a6..cf92a7d 100644
--- a/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java
+++ b/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java
@@ -211,6 +211,7 @@ public class MimeTypeIndexingFilter implements IndexingFilter {
    */
   public static void main(String[] args) throws IOException, IndexingException {
     Option helpOpt = new Option("h", "help", false, "show this help message");
+    @SuppressWarnings("static-access")
     Option rulesOpt = OptionBuilder.withArgName("file").hasArg()
         .withDescription(
             "Rules file to be used in the tests relative to the conf 
directory")
diff --git a/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java b/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java
index bca230f..4522f99 100644
--- a/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java
+++ b/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java
@@ -45,7 +45,6 @@ public class MimeTypeIndexingFilterTest {
   private MimeTypeIndexingFilter filter = new MimeTypeIndexingFilter();
   private String[] MIME_TYPES = { "text/html", "image/png", "application/pdf" };
   private ParseImpl[] parses = new ParseImpl[MIME_TYPES.length];
-  private String sampleDir = System.getProperty("test.data", ".");
 
   @Before
   public void setUp() throws Exception {
diff --git a/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java b/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java
index 78cd257..0d1d17e 100644
--- a/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java
+++ b/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java
@@ -344,6 +344,7 @@ public class HtmlParser implements Parser {
     String url = "file:" + name;
     File file = new File(name);
     byte[] bytes = new byte[(int) file.length()];
+    @SuppressWarnings("resource")
     DataInputStream in = new DataInputStream(new FileInputStream(file));
     in.readFully(bytes);
     Configuration conf = NutchConfiguration.create();
diff --git a/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java b/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
index 1c7d480..81d4485 100644
--- a/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
+++ b/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
@@ -246,7 +246,7 @@ class ExtractText extends SWFTagTypesImpl {
       int[] codes) throws IOException {
     // System.out.println("-defineFontInfo id=" + fontId + ", name=" +
     // fontName);
-    fontCodes.put(new Integer(fontId), codes);
+    fontCodes.put(Integer.valueOf(fontId), codes);
   }
 
   // XXX too much hassle for too little return ... we cannot guess character
@@ -263,7 +263,7 @@ class ExtractText extends SWFTagTypesImpl {
       int numGlyphs, int ascent, int descent, int leading, int[] codes,
       int[] advances, Rect[] bounds, int[] kernCodes1, int[] kernCodes2,
       int[] kernAdjustments) throws IOException {
-    fontCodes.put(new Integer(id), (codes != null) ? codes : new int[0]);
+    fontCodes.put(Integer.valueOf(id), (codes != null) ? codes : new int[0]);
 
     return null;
   }
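
[Editor's note] The two changes above follow a pattern used throughout this commit: the boxing constructors (new Integer(...), new Long(...), new Byte(...), new Character(...)) are deprecated as of Java 9, so calls are switched to the static valueOf(...) factories. A small illustrative sketch, not Nutch code:

    import java.util.HashMap;
    import java.util.Map;

    public class ValueOfSketch {
      public static void main(String[] args) {
        Map<Integer, int[]> fontCodes = new HashMap<>();

        // Deprecated since Java 9 and flagged when building under JDK 10:
        //   fontCodes.put(new Integer(42), new int[0]);

        // Preferred: valueOf() may reuse cached instances and compiles cleanly.
        fontCodes.put(Integer.valueOf(42), new int[0]);

        System.out.println(fontCodes.containsKey(42)); // autoboxing: true
      }
    }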
diff --git a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java
index a4146b3..8b6108d 100644
--- a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java
+++ b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java
@@ -41,7 +41,7 @@ class BoilerpipeExtractorRepository {
           Class extractorClass = loader.loadClass(boilerpipeExtractorName);
 
           // Add an instance to the repository
-          extractorRepository.put(boilerpipeExtractorName, (BoilerpipeExtractor)extractorClass.newInstance());
+          extractorRepository.put(boilerpipeExtractorName, (BoilerpipeExtractor)extractorClass.getConstructor().newInstance());
 
         } catch (ClassNotFoundException e) {
           LOG.error("BoilerpipeExtractor " + boilerpipeExtractorName + " not 
found!");
diff --git a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
index 40d82bc..e346940 100644
--- a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
+++ b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
@@ -277,7 +277,7 @@ public class TikaParser implements org.apache.nutch.parse.Parser {
           throw new RuntimeException("Class " + htmlmapperClassName
               + " does not implement HtmlMapper");
         }
-        HTMLMapper = (HtmlMapper) HTMLMapperClass.newInstance();
+        HTMLMapper = (HtmlMapper) HTMLMapperClass.getConstructor().newInstance();
       } catch (Exception e) {
         LOG.error("Can't generate instance for class " + htmlmapperClassName);
         throw new RuntimeException("Can't generate instance for class "
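
[Editor's note] As in the Boilerpipe and Tika changes above, Class.newInstance() (deprecated since Java 9) is replaced by getConstructor().newInstance(), which wraps constructor failures in InvocationTargetException instead of propagating checked exceptions unchecked. A minimal sketch of the replacement; the class name is chosen for illustration only:

    import java.lang.reflect.InvocationTargetException;

    public class ReflectionSketch {
      public static void main(String[] args) throws Exception {
        Class<?> clazz = Class.forName("java.lang.StringBuilder");

        // Deprecated since Java 9:
        //   Object o = clazz.newInstance();

        // Preferred: invoke the no-arg constructor explicitly; failures
        // surface as NoSuchMethodException / InvocationTargetException.
        try {
          Object o = clazz.getConstructor().newInstance();
          System.out.println(o.getClass().getName());
        } catch (NoSuchMethodException | InvocationTargetException e) {
          e.printStackTrace();
        }
      }
    }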
diff --git a/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java b/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java
index 2677395..3a4d70a 100644
--- a/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java
+++ b/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java
@@ -17,12 +17,8 @@
 
 package org.apache.nutch.parse.tika;
 
-import java.lang.invoke.MethodHandles;
-
 import org.junit.Assert;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -54,9 +50,6 @@ public class TestFeedParser {
 
   private String[] sampleFiles = { "rsstest.rss" };
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   /**
    * <p>
    * The test method: tests out the following 2 asserts:
diff --git a/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java b/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java
index 2209ceb..c2661a5 100644
--- a/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java
+++ b/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java
@@ -55,7 +55,6 @@ public class RegexParseFilter implements HtmlParseFilter {
   private String regexFile = null;
   
   private Configuration conf;
-  private DocumentFragment doc;
   
   private static final Map<String,RegexRule> rules = new HashMap<>();
   
diff --git a/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java b/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java
index 7bf21c4..238d300 100644
--- a/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java
+++ b/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.parsefilter.regex;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.parse.Parse;
diff --git a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java
index 4fef340..2019de0 100644
--- a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java
+++ b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java
@@ -22,6 +22,7 @@ package org.apache.nutch.protocol.file;
  */
 public class FileError extends FileException {
 
+  @SuppressWarnings("unused")
   private int code;
 
   public int getCode(int code) {
diff --git a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java
index 4b6666a..ce98270 100644
--- a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java
+++ b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java
@@ -226,7 +226,7 @@ public class FileResponse {
     is.close();
 
     // set headers
-    headers.set(Response.CONTENT_LENGTH, new Long(size).toString());
+    headers.set(Response.CONTENT_LENGTH, Long.valueOf(size).toString());
     headers.set(Response.LAST_MODIFIED,
         HttpDateFormat.toString(f.lastModified()));
 
@@ -255,7 +255,7 @@ public class FileResponse {
 
     // set headers
     headers.set(Response.CONTENT_LENGTH,
-        new Integer(this.content.length).toString());
+        Integer.valueOf(this.content.length).toString());
     headers.set(Response.CONTENT_TYPE, "text/html");
     headers.set(Response.LAST_MODIFIED,
         HttpDateFormat.toString(f.lastModified()));
diff --git a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
index eeba776..6d21b50 100644
--- a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
+++ b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
@@ -182,6 +182,7 @@ public class Ftp implements Protocol {
   public static void main(String[] args) throws Exception {
     int timeout = Integer.MIN_VALUE;
     int maxContentLength = Integer.MIN_VALUE;
+    @SuppressWarnings("unused")
     String logLevel = "info";
     boolean followTalk = false;
     boolean keepConnection = false;
diff --git a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java
index b63a67e..558747a 100644
--- a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java
+++ b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java
@@ -22,6 +22,7 @@ package org.apache.nutch.protocol.ftp;
  */
 public class FtpError extends FtpException {
 
+  @SuppressWarnings("unused")
   private int code;
 
   public int getCode(int code) {
diff --git a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java
index 51bc6bf..07adb4c 100644
--- a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java
+++ b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java
@@ -320,7 +320,7 @@ public class FtpResponse {
 
       FTPFile ftpFile = (FTPFile) list.get(0);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Long(ftpFile.getSize()).toString());
+          Long.valueOf(ftpFile.getSize()).toString());
       this.headers.set(Response.LAST_MODIFIED,
           HttpDateFormat.toString(ftpFile.getTimestamp()));
       // don't retrieve the file if not changed.
@@ -367,7 +367,7 @@ public class FtpResponse {
 
       FTPFile ftpFile = (FTPFile) list.get(0);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Long(ftpFile.getSize()).toString());
+          Long.valueOf(ftpFile.getSize()).toString());
       // this.headers.put("content-type", "text/html");
       this.headers.set(Response.LAST_MODIFIED,
           HttpDateFormat.toString(ftpFile.getTimestamp()));
@@ -428,7 +428,7 @@ public class FtpResponse {
       ftp.client.retrieveList(null, list, ftp.maxContentLength, ftp.parser);
       this.content = list2html(list, path, "/".equals(path) ? false : true);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Integer(this.content.length).toString());
+          Integer.valueOf(this.content.length).toString());
       this.headers.set(Response.CONTENT_TYPE, "text/html");
       // this.headers.put("Last-Modified", null);
 
@@ -452,7 +452,7 @@ public class FtpResponse {
 
       this.content = list2html(list, path, "/".equals(path) ? false : true);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Integer(this.content.length).toString());
+          Integer.valueOf(this.content.length).toString());
       this.headers.set(Response.CONTENT_TYPE, "text/html");
       // this.headers.put("Last-Modified", null);
 
diff --git a/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java b/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java
index 19c00fd..361b41e 100644
--- a/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java
+++ b/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java
@@ -51,7 +51,9 @@ public class HttpResponse implements Response {
   private Configuration conf;
   private HttpBase http;
   private URL url;
+  @SuppressWarnings("unused")
   private String orig;
+  @SuppressWarnings("unused")
   private String base;
   private byte[] content;
   private int code;
diff --git a/src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/Http.java b/src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/Http.java
old mode 100755
new mode 100644
diff --git a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java
index 2102f80..44683cc 100644
--- a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java
+++ b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java
@@ -20,7 +20,6 @@
 
 package org.apache.nutch.protocol.httpclient;
 
-import java.lang.invoke.MethodHandles;
 import java.security.KeyStore;
 import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
@@ -30,16 +29,10 @@ import java.security.cert.X509Certificate;
 import javax.net.ssl.TrustManagerFactory;
 import javax.net.ssl.TrustManager;
 import javax.net.ssl.X509TrustManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class DummyX509TrustManager implements X509TrustManager {
   private X509TrustManager standardTrustManager = null;
 
-  /** Logger object for this class. */
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   /**
    * Constructor for DummyX509TrustManager.
    */
diff --git a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
index e02d904..c185f9b 100644
--- a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
+++ b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
@@ -130,7 +130,7 @@ public class Http extends HttpBase {
    */
   public void setConf(Configuration conf) {
     super.setConf(conf);
-    this.conf = conf;
+    Http.conf = conf;
     this.maxThreadsTotal = conf.getInt("fetcher.threads.fetch", 10);
     this.proxyUsername = conf.get("http.proxy.username", "");
     this.proxyPassword = conf.get("http.proxy.password", "");
diff --git a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java
index 35d6bd5..506902d 100644
--- a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java
+++ b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java
@@ -51,6 +51,7 @@ public class HttpBasicAuthentication implements HttpAuthentication,
   private static Map<String, HttpBasicAuthentication> authMap = new TreeMap<String, HttpBasicAuthentication>();
 
   private Configuration conf = null;
+  @SuppressWarnings("unused")
   private String challenge = null;
   private ArrayList<String> credentials = null;
   private String realm = null;
diff --git a/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java b/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java
index 7f961d9..6d91b33 100644
--- a/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java
+++ b/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.PushbackInputStream;
+import java.lang.reflect.InvocationTargetException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.URL;
@@ -342,7 +343,11 @@ public class HttpResponse implements Response {
     for (int i = 0; i < handlerNames.length; i++) {
         try {
             String classToLoad = this.getClass().getPackage().getName() + ".handlers." + handlerNames[i];
-            handlers[i] = InteractiveSeleniumHandler.class.cast(Class.forName(classToLoad).newInstance());
+            try {
+              handlers[i] = InteractiveSeleniumHandler.class.cast(Class.forName(classToLoad).getConstructor().newInstance());
+            } catch (IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
+              e.printStackTrace();
+            }
             Http.LOG.info("Successfully loaded " + classToLoad);
         } catch (ClassNotFoundException e) {
             Http.LOG.info("Unable to load Handler class for: " + handlerNames[i]);
diff --git a/src/plugin/protocol-okhttp/src/java/org/apache/nutch/protocol/okhttp/OkHttp.java b/src/plugin/protocol-okhttp/src/java/org/apache/nutch/protocol/okhttp/OkHttp.java
old mode 100755
new mode 100644
diff --git a/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java b/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java
index 24cc366..7900259 100644
--- a/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java
+++ b/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java
@@ -40,7 +40,6 @@ public class OrphanScoringFilter extends AbstractScoringFilter {
 
   public static Text ORPHAN_KEY_WRITABLE = new Text("_orphan_");
 
-  private Configuration conf;
   private static int DEFAULT_GONE_TIME = 30 * 24 * 60 * 60;
   private static int DEFAULT_ORPHAN_TIME = 40 * 24 * 60 * 60;
 
diff --git a/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java b/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java
index 05b85da..b1c56d0 100644
--- a/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java
+++ b/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java
@@ -172,7 +172,6 @@ public class Model {
     String[] ngramStr = conf.getStrings("scoring.similarity.ngrams", "1,1");
     //mingram
     ngramArr[0] = Integer.parseInt(ngramStr[0]);
-    int maxgram;
     if (ngramStr.length > 1) {
       //maxgram
       ngramArr[1] = Integer.parseInt(ngramStr[1]);
diff --git a/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java b/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java
index 6bad964..898d314 100644
--- a/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java
+++ b/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java
@@ -16,15 +16,10 @@
  */
 package org.apache.nutch.indexer.subcollection;
 
-import java.lang.invoke.MethodHandles;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.nutch.parse.Parse;
 import org.apache.nutch.util.NutchConfiguration;
 
@@ -77,12 +72,6 @@ public class SubcollectionIndexingFilter extends Configured implements
   public static String metadataSource = "subcollection";
 
   /**
-   * Logger
-   */
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
-  /**
    * "Mark" document to be a part of subcollection
    * 
    * @param doc
diff --git a/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java b/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java
index 07523fe..675d857 100644
--- a/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java
+++ b/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java
@@ -20,10 +20,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLExemptionFilter;
 import org.apache.nutch.util.NutchConfiguration;
 import org.apache.nutch.urlfilter.regex.RegexURLFilter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.lang.invoke.MethodHandles;
 import java.io.IOException;
 import java.io.Reader;
 import java.util.regex.Pattern;
@@ -56,11 +52,8 @@ public class ExemptionUrlFilter extends RegexURLFilter
 
   public static final String DB_IGNORE_EXTERNAL_EXEMPTIONS_FILE
       = "db.ignore.external.exemptions.file";
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
 
   private List<Pattern> exemptions;
-  private Configuration conf;
 
   public List<Pattern> getExemptions() {
     return exemptions;
diff --git a/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java b/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java
index fcd3255..3e3b8bc 100644
--- a/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java
+++ b/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java
@@ -17,10 +17,6 @@
 
 package org.apache.nutch.indexer.urlmeta;
 
-import java.lang.invoke.MethodHandles;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -69,8 +65,6 @@ import org.apache.nutch.parse.Parse;
  */
 public class URLMetaIndexingFilter implements IndexingFilter {
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
   private static final String CONF_PROPERTY = "urlmeta.tags";
   private static String[] urlMetaTags;
   private Configuration conf;
diff --git a/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java b/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java
index 543edfd..8c9efac 100644
--- a/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java
+++ b/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java
@@ -17,14 +17,11 @@
 
 package org.apache.nutch.scoring.urlmeta;
 
-import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 import java.util.Map.Entry;
 import java.util.Iterator;
 import java.util.List;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -44,8 +41,6 @@ import org.apache.nutch.scoring.ScoringFilterException;
  */
 public class URLMetaScoringFilter extends Configured implements ScoringFilter {
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
   private static final String CONF_PROPERTY = "urlmeta.tags";
   private static String[] urlMetaTags;
   private Configuration conf;
diff --git a/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java b/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java
index 3679426..7d1d3f0 100644
--- a/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java
+++ b/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java
@@ -114,7 +114,6 @@ public class AjaxURLNormalizer implements URLNormalizer {
    * @return String
    */
   protected String normalizeEscapedFragment(String urlString) throws MalformedURLException {
-    int pos = urlString.indexOf(ESCAPED_URL_PART);
     URL u = new URL(urlString);
     StringBuilder sb = new StringBuilder();
 
diff --git a/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java b/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java
index 24f2e62..6a33690 100644
--- a/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java
+++ b/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java
@@ -279,7 +279,7 @@ public class BasicURLNormalizer extends Configured implements URLNormalizer {
 
       if (letter < 128 && unescapedCharacters[letter]) {
         // character should be unescaped in URLs
-        sb.append(new Character((char)letter));
+        sb.append(Character.valueOf((char)letter));
       } else {
         // Append the encoded character as uppercase
         sb.append(matcher.group().toUpperCase(Locale.ROOT));
diff --git a/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java b/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java
index 8880628..22005ce 100644
--- a/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java
+++ b/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.net.urlnormalizer.protocol;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizers;
 import org.apache.nutch.util.NutchConfiguration;
diff --git a/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java b/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java
index 04f61379..dbaf4d2 100644
--- a/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java
+++ b/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java
@@ -16,15 +16,12 @@
  */
 package org.apache.nutch.net.urlnormalizer.querystring;
 
-import java.lang.invoke.MethodHandles;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizer;
@@ -39,9 +36,6 @@ public class QuerystringURLNormalizer implements URLNormalizer {
 
   private Configuration conf;
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   public QuerystringURLNormalizer() {
   }
 
diff --git a/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java b/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java
index b85c55d..e9a02cd 100644
--- a/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java
+++ b/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.net.urlnormalizer.querystring;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizers;
 import org.apache.nutch.util.NutchConfiguration;
diff --git a/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java b/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java
index c3585e4..c5b3897 100644
--- a/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java
+++ b/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.net.urlnormalizer.slash;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizers;
 import org.apache.nutch.util.NutchConfiguration;
diff --git a/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java b/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java
index 583ed29..74c54d5 100644
--- a/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java
+++ b/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java
@@ -19,7 +19,6 @@ package org.apache.nutch.crawl;
 
 import java.lang.invoke.MethodHandles;
 import java.io.IOException;
-import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -27,25 +26,8 @@ import java.util.Iterator;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configuration.IntegerRanges;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.Partitioner;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.Reducer.Context;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskInputOutputContext;
-import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java b/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java
index 0ad941c..1d1f1e7 100644
--- a/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java
+++ b/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
diff --git a/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java b/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
index f5ad82f..7188203 100644
--- a/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
+++ b/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.MapFile.Writer.Option;
-import org.apache.hadoop.mapreduce.Job;
 import org.apache.nutch.util.NutchConfiguration;
 import org.junit.After;
 import org.junit.Assert;
diff --git a/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java b/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java
index 93ef9c5..db7c067 100644
--- a/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java
+++ b/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java
@@ -74,8 +74,8 @@ public class TestSegmentMergerCrawlDatums {
   @Test
   public void testSingleRandomSequence() throws Exception {
     Assert.assertEquals(
-        new Byte(CrawlDatum.STATUS_FETCH_SUCCESS),
-        new Byte(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
+        Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS),
+        Byte.valueOf(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
             CrawlDatum.STATUS_FETCH_SUCCESS, 256, false)));
   }
 
@@ -109,9 +109,9 @@ public class TestSegmentMergerCrawlDatums {
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment1, segment2,
         segment3, segment4, segment5, segment6, segment7, segment8 });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
@@ -147,8 +147,8 @@ public class TestSegmentMergerCrawlDatums {
   @Test
   public void testRandomTestSequenceWithRedirects() throws Exception {
     Assert.assertEquals(
-        new Byte(CrawlDatum.STATUS_FETCH_SUCCESS),
-        new Byte(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
+        Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS),
+        Byte.valueOf(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
             CrawlDatum.STATUS_FETCH_SUCCESS, 128, true)));
   }
 
@@ -172,9 +172,9 @@ public class TestSegmentMergerCrawlDatums {
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment1, segment2,
         segment3 });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
@@ -192,9 +192,9 @@ public class TestSegmentMergerCrawlDatums {
 
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
@@ -214,9 +214,9 @@ public class TestSegmentMergerCrawlDatums {
 
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment1, segment2 });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
diff --git a/src/test/org/apache/nutch/service/TestNutchServer.java b/src/test/org/apache/nutch/service/TestNutchServer.java
index 021d031..4d42f7b 100644
--- a/src/test/org/apache/nutch/service/TestNutchServer.java
+++ b/src/test/org/apache/nutch/service/TestNutchServer.java
@@ -21,7 +21,6 @@ import java.lang.invoke.MethodHandles;
 import javax.ws.rs.core.Response;
 
 import org.apache.cxf.jaxrs.client.WebClient;
-import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,6 +52,7 @@ public class TestNutchServer {
     else {
       LOG.info("Testing admin endpoint");
       WebClient client = WebClient.create(ENDPOINT_ADDRESS + server.getPort());
+      @SuppressWarnings("unused")
+      Response response = client.path("admin").get();
       //Assert.assertTrue(response.readEntity(String.class).contains("startDate"));
       response = client.path("stop").get();
diff --git a/src/test/org/apache/nutch/util/WritableTestUtils.java b/src/test/org/apache/nutch/util/WritableTestUtils.java
index 49bcfa9..0822603 100644
--- a/src/test/org/apache/nutch/util/WritableTestUtils.java
+++ b/src/test/org/apache/nutch/util/WritableTestUtils.java
@@ -44,7 +44,7 @@ public class WritableTestUtils {
     DataInputBuffer dib = new DataInputBuffer();
     dib.reset(dob.getData(), dob.getLength());
 
-    Writable after = (Writable) before.getClass().newInstance();
+    Writable after = (Writable) before.getClass().getConstructor().newInstance();
     if (conf != null) {
       ((Configurable) after).setConf(conf);
     }
