ctubbsii closed pull request #425: Separate mapreduce code from core
URL: https://github.com/apache/accumulo/pull/425

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

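For readers tracking the move: the relocated classes keep their org.apache.accumulo.core.client.mapred and .mapreduce package names, so existing jobs should mostly need the new accumulo-client-mapreduce artifact on the classpath plus a switch to the non-deprecated configuration methods this change keeps. Below is a minimal sketch of a new-API (org.apache.hadoop.mapreduce) job setup; the instance name, ZooKeeper hosts, credentials, and table name are placeholders, not values from this PR. The old-API (org.apache.hadoop.mapred) formats moved as well; a companion sketch follows their AbstractInputFormat diff further down.

import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.hadoop.mapreduce.Job;

public class NewApiScanJobSetup {
  public static Job configure() throws Exception {
    Job job = Job.getInstance();
    job.setJobName("accumulo-client-mapreduce example");
    job.setInputFormatClass(AccumuloInputFormat.class);

    // The (instanceName, zooKeepers) and setMockInstance overloads are removed
    // by this PR; the ClientConfiguration overload below is the one that remains.
    AccumuloInputFormat.setZooKeeperInstance(job,
        ClientConfiguration.create().withInstance("myinstance").withZkHosts("zkhost:2181"));
    AccumuloInputFormat.setConnectorInfo(job, "user", new PasswordToken("secret"));
    AccumuloInputFormat.setInputTableName(job, "mytable");
    AccumuloInputFormat.setScanAuthorizations(job, new Authorizations());
    return job;
  }
}
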
diff --git a/assemble/pom.xml b/assemble/pom.xml
index 9a0663ba70..0aba9cbcb2 100644
--- a/assemble/pom.xml
+++ b/assemble/pom.xml
@@ -110,6 +110,11 @@
       <artifactId>log4j</artifactId>
       <optional>true</optional>
     </dependency>
+    <dependency>
+      <groupId>org.apache.accumulo</groupId>
+      <artifactId>accumulo-client-mapreduce</artifactId>
+      <optional>true</optional>
+    </dependency>
     <dependency>
       <groupId>org.apache.accumulo</groupId>
       <artifactId>accumulo-core</artifactId>
diff --git a/client/mapreduce/.gitignore b/client/mapreduce/.gitignore
new file mode 100644
index 0000000000..e77a822fe7
--- /dev/null
+++ b/client/mapreduce/.gitignore
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Maven ignores
+/target/
+
+# IDE ignores
+/.settings/
+/.project
+/.classpath
+/.pydevproject
+/.idea
+/*.iml
+/nbproject/
+/nbactions.xml
+/nb-configuration.xml
diff --git a/client/mapreduce/pom.xml b/client/mapreduce/pom.xml
new file mode 100644
index 0000000000..2ecce2d556
--- /dev/null
+++ b/client/mapreduce/pom.xml
@@ -0,0 +1,161 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd";>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.accumulo</groupId>
+    <artifactId>accumulo-project</artifactId>
+    <version>2.0.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+  <artifactId>accumulo-client-mapreduce</artifactId>
+  <name>Apache Accumulo MapReduce APIs</name>
+  <description>Apache Accumulo core libraries.</description>
+  <dependencies>
+    <dependency>
+      <groupId>com.beust</groupId>
+      <artifactId>jcommander</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.accumulo</groupId>
+      <artifactId>accumulo-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.accumulo</groupId>
+      <artifactId>accumulo-fate</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>apilyzer-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>apilyzer</id>
+            <goals>
+              <goal>analyze</goal>
+            </goals>
+            <configuration>
+              <includes>
+                
<include>org[.]apache[.]accumulo[.]core[.]client[.]mapred(?:uce)?[.].*</include>
+              </includes>
+              <excludes>
+                <exclude>.*[.]impl[.].*</exclude>
+              </excludes>
+              <allows>
+                
<allow>org[.]apache[.]accumulo[.]core[.](?:client|data|security)[.](?!.*(impl|thrift|crypto).*).*</allow>
+                <!-- Not public API, but hard to get rid of. Pair is just so 
useful. -->
+                <allow>org[.]apache[.]accumulo[.]core[.]util[.]Pair</allow>
+                <!--Types from hadoop used in API. If adding a new type from
+                     Hadoop to the Accumulo API ensure its annotated as 
stable.-->
+                <allow>org[.]apache[.]hadoop[.]conf[.]Configuration</allow>
+                <allow>org[.]apache[.]hadoop[.]fs[.](FileSystem|Path)</allow>
+                
<allow>org[.]apache[.]hadoop[.]io[.](Text|Writable|WritableComparable|WritableComparator)</allow>
+                
<allow>org[.]apache[.]hadoop[.]mapred[.](JobConf|RecordReader|InputSplit|RecordWriter|Reporter)</allow>
+                
<allow>org[.]apache[.]hadoop[.]mapred[.]FileOutputFormat[$]Counter</allow>
+                
<allow>org[.]apache[.]hadoop[.]mapreduce[.](Job|JobContext|RecordReader|InputSplit|TaskAttemptContext|RecordWriter|OutputCommitter|TaskInputOutputContext)</allow>
+                
<allow>org[.]apache[.]hadoop[.]mapreduce[.]lib[.]output[.]FileOutputFormat[$]Counter</allow>
+                <allow>org[.]apache[.]hadoop[.]util[.]Progressable</allow>
+                
<allow>org[.]apache[.]hadoop[.]mapred[.](FileAlreadyExistsException|InvalidJobConfException)</allow>
+                <!--ugghhh-->
+                <allow>org[.]apache[.]log4j[.](Level|Logger)</allow>
+              </allows>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <profiles>
+    <profile>
+      <id>hadoop-default</id>
+      <activation>
+        <property>
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <properties>
+        <hadoop.profile>2</hadoop.profile>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop2</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>2</value>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop3</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>3</value>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client-api</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client-runtime</artifactId>
+          <scope>runtime</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+</project>
diff --git a/client/mapreduce/src/main/findbugs/exclude-filter.xml 
b/client/mapreduce/src/main/findbugs/exclude-filter.xml
new file mode 100644
index 0000000000..108e26ef88
--- /dev/null
+++ b/client/mapreduce/src/main/findbugs/exclude-filter.xml
@@ -0,0 +1,29 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<FindBugsFilter>
+  <Match>
+    <!-- ignore intentional name shadowing -->
+    <Or>
+      <Class name="org.apache.accumulo.core.client.mapred.RangeInputSplit" />
+      <Class 
name="org.apache.accumulo.core.client.mapred.impl.BatchInputSplit" />
+    </Or>
+    <Or>
+      <Bug code="NM" pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
+      <Bug code="NM" pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
+    </Or>
+  </Match>
+</FindBugsFilter>
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
similarity index 91%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
index ac46580c3b..f58a8a320e 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
@@ -73,7 +73,6 @@
 import org.apache.accumulo.core.data.impl.KeyExtent;
 import org.apache.accumulo.core.master.state.tables.TableState;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputFormat;
@@ -249,24 +248,6 @@ protected static AuthenticationToken 
getAuthenticationToken(JobConf job) {
     return ConfiguratorBase.unwrapAuthenticationToken(job, token);
   }
 
-  /**
-   * Configures a {@link org.apache.accumulo.core.client.ZooKeeperInstance} 
for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @param zooKeepers
-   *          a comma-separated list of zookeeper servers
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #setConnectionInfo(JobConf, 
ConnectionInfo)} instead.
-   */
-  @Deprecated
-  public static void setZooKeeperInstance(JobConf job, String instanceName, 
String zooKeepers) {
-    setZooKeeperInstance(job,
-        
ClientConfiguration.create().withInstance(instanceName).withZkHosts(zooKeepers));
-  }
-
   /**
    * Configures a {@link org.apache.accumulo.core.client.ZooKeeperInstance} 
for this job.
    *
@@ -282,21 +263,6 @@ public static void setZooKeeperInstance(JobConf job, 
ClientConfiguration clientC
     InputConfigurator.setZooKeeperInstance(CLASS, job, clientConfig);
   }
 
-  /**
-   * Configures a {@link org.apache.accumulo.core.client.mock.MockInstance} 
for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @since 1.5.0
-   * @deprecated since 1.8.0; use MiniAccumuloCluster or a standard mock 
framework
-   */
-  @Deprecated
-  public static void setMockInstance(JobConf job, String instanceName) {
-    InputConfigurator.setMockInstance(CLASS, job, instanceName);
-  }
-
   /**
    * Initializes an Accumulo {@link org.apache.accumulo.core.client.Instance} 
based on the
    * configuration.
@@ -491,24 +457,6 @@ private void setupIterators(JobConf job, ScannerBase 
scanner, String tableName,
         scanner.addScanIterator(iterator);
     }
 
-    /**
-     * Configures the iterators on a scanner for the given table name.
-     *
-     * @param job
-     *          the Hadoop job configuration
-     * @param scanner
-     *          the scanner for which to configure the iterators
-     * @param tableName
-     *          the table name for which the scanner is configured
-     * @since 1.6.0
-     * @deprecated since 1.7.0; Use {@link #jobIterators} instead.
-     */
-    @Deprecated
-    protected void setupIterators(JobConf job, Scanner scanner, String 
tableName,
-        RangeInputSplit split) {
-      setupIterators(job, (ScannerBase) scanner, tableName, split);
-    }
-
     /**
      * Initialize a scanner over the given input split using this task attempt 
configuration.
      */
@@ -591,9 +539,6 @@ public void initialize(InputSplit inSplit, JobConf job) 
throws IOException {
           if (isOffline) {
             scanner = new OfflineScanner(instance, new Credentials(principal, 
token),
                 Table.ID.of(baseSplit.getTableId()), authorizations);
-          } else if (DeprecationUtil.isMockInstance(instance)) {
-            scanner = instance.getConnector(principal, token)
-                .createScanner(baseSplit.getTableName(), authorizations);
           } else {
             ClientConfiguration clientConf = getClientConfiguration(job);
             ClientContext context = new ClientContext(instance, new 
Credentials(principal, token),
@@ -707,14 +652,10 @@ public float getProgress() throws IOException {
       Instance instance = getInstance(job);
       Table.ID tableId;
       // resolve table name to id once, and use id from this point forward
-      if (DeprecationUtil.isMockInstance(instance)) {
-        tableId = Table.ID.of("");
-      } else {
-        try {
-          tableId = Tables.getTableId(instance, tableName);
-        } catch (TableNotFoundException e) {
-          throw new IOException(e);
-        }
+      try {
+        tableId = Tables.getTableId(instance, tableName);
+      } catch (TableNotFoundException e) {
+        throw new IOException(e);
       }
 
       Authorizations auths = getScanAuthorizations(job);
@@ -762,13 +703,11 @@ public float getProgress() throws IOException {
               new Credentials(getPrincipal(job), getAuthenticationToken(job)),
               getClientConfiguration(job));
           while (!tl.binRanges(context, ranges, binnedRanges).isEmpty()) {
-            if (!DeprecationUtil.isMockInstance(instance)) {
-              String tableIdStr = tableId.canonicalID();
-              if (!Tables.exists(instance, tableId))
-                throw new TableDeletedException(tableIdStr);
-              if (Tables.getTableState(instance, tableId) == 
TableState.OFFLINE)
-                throw new TableOfflineException(instance, tableIdStr);
-            }
+            String tableIdStr = tableId.canonicalID();
+            if (!Tables.exists(instance, tableId))
+              throw new TableDeletedException(tableIdStr);
+            if (Tables.getTableState(instance, tableId) == TableState.OFFLINE)
+              throw new TableOfflineException(instance, tableIdStr);
             binnedRanges.clear();
             log.warn("Unable to locate bins for specified ranges. Retrying.");
             // sleep randomly between 100 and 200 ms
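
As a companion to the sketch near the top, the old-API formats retain the same ClientConfiguration-based setup after the removals in this file; again, the connection details and table name below are placeholders, not values from this PR.

import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.hadoop.mapred.JobConf;

public class OldApiScanJobSetup {
  public static JobConf configure() throws Exception {
    JobConf job = new JobConf();
    // Same pattern as the new-API formats: only the ClientConfiguration-based
    // setZooKeeperInstance survives this change.
    AccumuloInputFormat.setZooKeeperInstance(job,
        ClientConfiguration.create().withInstance("myinstance").withZkHosts("zkhost:2181"));
    AccumuloInputFormat.setConnectorInfo(job, "user", new PasswordToken("secret"));
    AccumuloInputFormat.setInputTableName(job, "mytable");
    return job;
  }
}
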
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
similarity index 91%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
index b9e377b2f4..1daf9e3d1a 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
@@ -44,7 +44,6 @@
 import org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator;
 import org.apache.accumulo.core.client.security.SecurityErrorCode;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import 
org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
 import org.apache.accumulo.core.client.security.tokens.DelegationToken;
 import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
@@ -198,28 +197,6 @@ protected static String getPrincipal(JobConf job) {
     return OutputConfigurator.getPrincipal(CLASS, job);
   }
 
-  /**
-   * Gets the serialized token class from either the configuration or the 
token file.
-   *
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #getAuthenticationToken(JobConf)} 
instead.
-   */
-  @Deprecated
-  protected static String getTokenClass(JobConf job) {
-    return getAuthenticationToken(job).getClass().getName();
-  }
-
-  /**
-   * Gets the serialized token from either the configuration or the token file.
-   *
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #getAuthenticationToken(JobConf)} 
instead.
-   */
-  @Deprecated
-  protected static byte[] getToken(JobConf job) {
-    return 
AuthenticationTokenSerializer.serialize(getAuthenticationToken(job));
-  }
-
   /**
    * Gets the authenticated token from either the specified token file or 
directly from the
    * configuration, whichever was used when the job was configured.
@@ -236,24 +213,6 @@ protected static AuthenticationToken 
getAuthenticationToken(JobConf job) {
     return ConfiguratorBase.unwrapAuthenticationToken(job, token);
   }
 
-  /**
-   * Configures a {@link ZooKeeperInstance} for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @param zooKeepers
-   *          a comma-separated list of zookeeper servers
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #setConnectionInfo(JobConf, 
ConnectionInfo)} instead.
-   */
-  @Deprecated
-  public static void setZooKeeperInstance(JobConf job, String instanceName, 
String zooKeepers) {
-    setZooKeeperInstance(job,
-        
ClientConfiguration.create().withInstance(instanceName).withZkHosts(zooKeepers));
-  }
-
   /**
    * Configures a {@link ZooKeeperInstance} for this job.
    *
@@ -270,21 +229,6 @@ public static void setZooKeeperInstance(JobConf job, 
ClientConfiguration clientC
     OutputConfigurator.setZooKeeperInstance(CLASS, job, clientConfig);
   }
 
-  /**
-   * Configures a {@link org.apache.accumulo.core.client.mock.MockInstance} 
for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @since 1.5.0
-   * @deprecated since 1.8.0; use MiniAccumuloCluster or a standard mock 
framework
-   */
-  @Deprecated
-  public static void setMockInstance(JobConf job, String instanceName) {
-    OutputConfigurator.setMockInstance(CLASS, job, instanceName);
-  }
-
   /**
    * Initializes an Accumulo {@link Instance} based on the configuration.
    *
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloRowInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloRowInputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloRowInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloRowInputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
similarity index 89%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
index e873ece4a0..542cdeb7a2 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
@@ -24,7 +24,6 @@
 import org.apache.accumulo.core.client.ClientSideIteratorScanner;
 import org.apache.accumulo.core.client.IsolatedScanner;
 import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.ScannerBase;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
@@ -379,59 +378,5 @@ public static void setSamplerConfiguration(JobConf job, 
SamplerConfiguration sam
     protected List<IteratorSetting> jobIterators(JobConf job, String 
tableName) {
       return getIterators(job);
     }
-
-    /**
-     * Apply the configured iterators to the scanner.
-     *
-     * @param iterators
-     *          the iterators to set
-     * @param scanner
-     *          the scanner to configure
-     * @deprecated since 1.7.0; Use {@link #jobIterators} instead.
-     */
-    @Deprecated
-    protected void setupIterators(List<IteratorSetting> iterators, Scanner 
scanner) {
-      for (IteratorSetting iterator : iterators) {
-        scanner.addScanIterator(iterator);
-      }
-    }
-
-    /**
-     * Apply the configured iterators from the configuration to the scanner.
-     *
-     * @param job
-     *          the job configuration
-     * @param scanner
-     *          the scanner to configure
-     */
-    @Deprecated
-    protected void setupIterators(JobConf job, Scanner scanner) {
-      setupIterators(getIterators(job), scanner);
-    }
-  }
-
-  /**
-   * @deprecated since 1.5.2; Use {@link 
org.apache.accumulo.core.client.mapred.RangeInputSplit}
-   *             instead.
-   * @see org.apache.accumulo.core.client.mapred.RangeInputSplit
-   */
-  @Deprecated
-  public static class RangeInputSplit
-      extends org.apache.accumulo.core.client.mapred.RangeInputSplit {
-    public RangeInputSplit() {
-      super();
-    }
-
-    public RangeInputSplit(RangeInputSplit other) throws IOException {
-      super(other);
-    }
-
-    public RangeInputSplit(String table, String tableId, Range range, String[] 
locations) {
-      super(table, tableId, range, locations);
-    }
-
-    protected RangeInputSplit(String table, Range range, String[] locations) {
-      super(table, "", range, locations);
-    }
   }
 }
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/RangeInputSplit.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/RangeInputSplit.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/RangeInputSplit.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/RangeInputSplit.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapred/impl/BatchInputSplit.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/impl/BatchInputSplit.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapred/impl/BatchInputSplit.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/impl/BatchInputSplit.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
similarity index 90%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
index e10d57a88d..2b54efe53b 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
@@ -72,7 +72,6 @@
 import org.apache.accumulo.core.data.impl.KeyExtent;
 import org.apache.accumulo.core.master.state.tables.TableState;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -234,29 +233,6 @@ protected static String getPrincipal(JobContext context) {
     return InputConfigurator.getPrincipal(CLASS, context.getConfiguration());
   }
 
-  /**
-   * Gets the serialized token class from either the configuration or the 
token file.
-   *
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #getAuthenticationToken(JobContext)} 
instead.
-   */
-  @Deprecated
-  protected static String getTokenClass(JobContext context) {
-    return getAuthenticationToken(context).getClass().getName();
-  }
-
-  /**
-   * Gets the serialized token from either the configuration or the token file.
-   *
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #getAuthenticationToken(JobContext)} 
instead.
-   */
-  @Deprecated
-  protected static byte[] getToken(JobContext context) {
-    return AuthenticationToken.AuthenticationTokenSerializer
-        .serialize(getAuthenticationToken(context));
-  }
-
   /**
    * Gets the authenticated token from either the specified token file or 
directly from the
    * configuration, whichever was used when the job was configured.
@@ -274,24 +250,6 @@ protected static AuthenticationToken 
getAuthenticationToken(JobContext context)
     return ConfiguratorBase.unwrapAuthenticationToken(context, token);
   }
 
-  /**
-   * Configures a {@link org.apache.accumulo.core.client.ZooKeeperInstance} 
for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @param zooKeepers
-   *          a comma-separated list of zookeeper servers
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #setConnectionInfo(Job, 
ConnectionInfo)} instead.
-   */
-  @Deprecated
-  public static void setZooKeeperInstance(Job job, String instanceName, String 
zooKeepers) {
-    setZooKeeperInstance(job,
-        
ClientConfiguration.create().withInstance(instanceName).withZkHosts(zooKeepers));
-  }
-
   /**
    * Configures a {@link org.apache.accumulo.core.client.ZooKeeperInstance} 
for this job.
    *
@@ -308,21 +266,6 @@ public static void setZooKeeperInstance(Job job, 
ClientConfiguration clientConfi
     InputConfigurator.setZooKeeperInstance(CLASS, job.getConfiguration(), 
clientConfig);
   }
 
-  /**
-   * Configures a {@link org.apache.accumulo.core.client.mock.MockInstance} 
for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @since 1.5.0
-   * @deprecated since 1.8.0; use MiniAccumuloCluster or a standard mock 
framework
-   */
-  @Deprecated
-  public static void setMockInstance(Job job, String instanceName) {
-    InputConfigurator.setMockInstance(CLASS, job.getConfiguration(), 
instanceName);
-  }
-
   /**
    * Initializes an Accumulo {@link org.apache.accumulo.core.client.Instance} 
based on the
    * configuration.
@@ -522,24 +465,6 @@ private void setupIterators(TaskAttemptContext context, 
ScannerBase scanner, Str
         scanner.addScanIterator(iterator);
     }
 
-    /**
-     * Configures the iterators on a scanner for the given table name.
-     *
-     * @param context
-     *          the Hadoop context for the configured job
-     * @param scanner
-     *          the scanner for which to configure the iterators
-     * @param tableName
-     *          the table name for which the scanner is configured
-     * @since 1.6.0
-     * @deprecated since 1.7.0; Use {@link #contextIterators} instead.
-     */
-    @Deprecated
-    protected void setupIterators(TaskAttemptContext context, Scanner scanner, 
String tableName,
-        RangeInputSplit split) {
-      setupIterators(context, (ScannerBase) scanner, tableName, split);
-    }
-
     @Override
     public void initialize(InputSplit inSplit, TaskAttemptContext attempt) 
throws IOException {
 
@@ -620,9 +545,6 @@ public void initialize(InputSplit inSplit, 
TaskAttemptContext attempt) throws IO
           if (isOffline) {
             scanner = new OfflineScanner(instance, new Credentials(principal, 
token),
                 Table.ID.of(split.getTableId()), authorizations);
-          } else if (DeprecationUtil.isMockInstance(instance)) {
-            scanner = instance.getConnector(principal, 
token).createScanner(split.getTableName(),
-                authorizations);
           } else {
             ClientConfiguration clientConf = getClientConfiguration(attempt);
             ClientContext context = new ClientContext(instance, new 
Credentials(principal, token),
@@ -753,14 +675,10 @@ public V getCurrentValue() throws IOException, 
InterruptedException {
       Instance instance = getInstance(context);
       Table.ID tableId;
       // resolve table name to id once, and use id from this point forward
-      if (DeprecationUtil.isMockInstance(instance)) {
-        tableId = Table.ID.of("");
-      } else {
-        try {
-          tableId = Tables.getTableId(instance, tableName);
-        } catch (TableNotFoundException e) {
-          throw new IOException(e);
-        }
+      try {
+        tableId = Tables.getTableId(instance, tableName);
+      } catch (TableNotFoundException e) {
+        throw new IOException(e);
       }
 
       Authorizations auths = getScanAuthorizations(context);
@@ -809,13 +727,11 @@ public V getCurrentValue() throws IOException, 
InterruptedException {
               new Credentials(getPrincipal(context), 
getAuthenticationToken(context)),
               getClientConfiguration(context));
           while (!tl.binRanges(clientContext, ranges, binnedRanges).isEmpty()) 
{
-            if (!DeprecationUtil.isMockInstance(instance)) {
-              String tableIdStr = tableId.canonicalID();
-              if (!Tables.exists(instance, tableId))
-                throw new TableDeletedException(tableIdStr);
-              if (Tables.getTableState(instance, tableId) == 
TableState.OFFLINE)
-                throw new TableOfflineException(instance, tableIdStr);
-            }
+            String tableIdStr = tableId.canonicalID();
+            if (!Tables.exists(instance, tableId))
+              throw new TableDeletedException(tableIdStr);
+            if (Tables.getTableState(instance, tableId) == TableState.OFFLINE)
+              throw new TableOfflineException(instance, tableIdStr);
             binnedRanges.clear();
             log.warn("Unable to locate bins for specified ranges. Retrying.");
             // sleep randomly between 100 and 200 ms
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
similarity index 92%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
index 355f826959..ae099c6d71 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
@@ -44,7 +44,6 @@
 import org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator;
 import org.apache.accumulo.core.client.security.SecurityErrorCode;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import 
org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
 import org.apache.accumulo.core.client.security.tokens.DelegationToken;
 import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
@@ -199,28 +198,6 @@ protected static String getPrincipal(JobContext context) {
     return OutputConfigurator.getPrincipal(CLASS, context.getConfiguration());
   }
 
-  /**
-   * Gets the serialized token class from either the configuration or the 
token file.
-   *
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #getAuthenticationToken(JobContext)} 
instead.
-   */
-  @Deprecated
-  protected static String getTokenClass(JobContext context) {
-    return getAuthenticationToken(context).getClass().getName();
-  }
-
-  /**
-   * Gets the serialized token from either the configuration or the token file.
-   *
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #getAuthenticationToken(JobContext)} 
instead.
-   */
-  @Deprecated
-  protected static byte[] getToken(JobContext context) {
-    return 
AuthenticationTokenSerializer.serialize(getAuthenticationToken(context));
-  }
-
   /**
    * Gets the authenticated token from either the specified token file or 
directly from the
    * configuration, whichever was used when the job was configured.
@@ -238,24 +215,6 @@ protected static AuthenticationToken 
getAuthenticationToken(JobContext context)
     return ConfiguratorBase.unwrapAuthenticationToken(context, token);
   }
 
-  /**
-   * Configures a {@link ZooKeeperInstance} for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @param zooKeepers
-   *          a comma-separated list of zookeeper servers
-   * @since 1.5.0
-   * @deprecated since 1.6.0; Use {@link #setZooKeeperInstance(Job, 
ClientConfiguration)} instead.
-   */
-  @Deprecated
-  public static void setZooKeeperInstance(Job job, String instanceName, String 
zooKeepers) {
-    setZooKeeperInstance(job,
-        
ClientConfiguration.create().withInstance(instanceName).withZkHosts(zooKeepers));
-  }
-
   /**
    * Configures a {@link ZooKeeperInstance} for this job.
    *
@@ -272,20 +231,6 @@ public static void setZooKeeperInstance(Job job, 
ClientConfiguration clientConfi
     OutputConfigurator.setZooKeeperInstance(CLASS, job.getConfiguration(), 
clientConfig);
   }
 
-  /**
-   * Configures a {@link org.apache.accumulo.core.client.mock.MockInstance} 
for this job.
-   *
-   * @param job
-   *          the Hadoop job instance to be configured
-   * @param instanceName
-   *          the Accumulo instance name
-   * @since 1.5.0
-   */
-  @Deprecated
-  public static void setMockInstance(Job job, String instanceName) {
-    OutputConfigurator.setMockInstance(CLASS, job.getConfiguration(), 
instanceName);
-  }
-
   /**
    * Initializes an Accumulo {@link Instance} based on the configuration.
    *
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormat.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormat.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormat.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormat.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
similarity index 89%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
index 1b61dd8199..b466b08751 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
@@ -24,7 +24,6 @@
 import org.apache.accumulo.core.client.ClientSideIteratorScanner;
 import org.apache.accumulo.core.client.IsolatedScanner;
 import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.ScannerBase;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
@@ -378,57 +377,5 @@ public static void setSamplerConfiguration(Job job, 
SamplerConfiguration sampler
     protected List<IteratorSetting> contextIterators(TaskAttemptContext 
context, String tableName) {
       return getIterators(context);
     }
-
-    /**
-     * Apply the configured iterators from the configuration to the scanner.
-     *
-     * @param context
-     *          the Hadoop context for the configured job
-     * @param scanner
-     *          the scanner to configure
-     * @deprecated since 1.7.0; Use {@link #contextIterators} instead.
-     */
-    @Deprecated
-    protected void setupIterators(TaskAttemptContext context, Scanner scanner) 
{
-      // tableName is given as null as it will be ignored in eventual call to 
#contextIterators
-      setupIterators(context, scanner, null, null);
-    }
-
-    /**
-     * Initialize a scanner over the given input split using this task attempt 
configuration.
-     *
-     * @deprecated since 1.7.0; Use {@link #contextIterators} instead.
-     */
-    @Deprecated
-    protected void setupIterators(TaskAttemptContext context, Scanner scanner,
-        org.apache.accumulo.core.client.mapreduce.RangeInputSplit split) {
-      setupIterators(context, scanner, null, split);
-    }
-  }
-
-  /**
-   * @deprecated since 1.5.2; Use {@link 
org.apache.accumulo.core.client.mapreduce.RangeInputSplit}
-   *             instead.
-   * @see org.apache.accumulo.core.client.mapreduce.RangeInputSplit
-   */
-  @Deprecated
-  public static class RangeInputSplit
-      extends org.apache.accumulo.core.client.mapreduce.RangeInputSplit {
-
-    public RangeInputSplit() {
-      super();
-    }
-
-    public RangeInputSplit(RangeInputSplit other) throws IOException {
-      super(other);
-    }
-
-    protected RangeInputSplit(String table, Range range, String[] locations) {
-      super(table, "", range, locations);
-    }
-
-    public RangeInputSplit(String table, String tableId, Range range, String[] 
locations) {
-      super(table, tableId, range, locations);
-    }
   }
 }
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputTableConfig.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/InputTableConfig.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputTableConfig.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/InputTableConfig.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
similarity index 90%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
index e56c1d9a6e..26af2442f5 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
@@ -45,7 +45,6 @@
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -62,7 +61,7 @@
   private TokenSource tokenSource;
   private String tokenFile;
   private AuthenticationToken token;
-  private Boolean offline, mockInstance, isolatedScan, localIterators;
+  private Boolean offline, isolatedScan, localIterators;
   private Authorizations auths;
   private Set<Pair<Text,Text>> fetchedColumns;
   private List<IteratorSetting> iterators;
@@ -159,8 +158,10 @@ public void readFields(DataInput in) throws IOException {
       localIterators = in.readBoolean();
     }
 
+    // ignore mock flag; it was removed
     if (in.readBoolean()) {
-      mockInstance = in.readBoolean();
+      throw new IllegalStateException(
+          "Mock flag was set in serialized RangeInputSplit, but mock Accumulo 
was removed");
     }
 
     if (in.readBoolean()) {
@@ -252,10 +253,8 @@ public void write(DataOutput out) throws IOException {
       out.writeBoolean(localIterators);
     }
 
-    out.writeBoolean(null != mockInstance);
-    if (null != mockInstance) {
-      out.writeBoolean(mockInstance);
-    }
+    // should be false to indicate that no mock flag was serialized; mock was 
removed
+    out.writeBoolean(false);
 
     out.writeBoolean(null != fetchedColumns);
     if (null != fetchedColumns) {
@@ -321,30 +320,10 @@ public void write(DataOutput out) throws IOException {
     }
   }
 
-  /**
-   * Use {@link #getTableName}
-   *
-   * @deprecated since 1.6.1, use getTableName() instead.
-   */
-  @Deprecated
-  public String getTable() {
-    return getTableName();
-  }
-
   public String getTableName() {
     return tableName;
   }
 
-  /**
-   * Use {@link #setTableName}
-   *
-   * @deprecated since 1.6.1, use setTableName() instead.
-   */
-  @Deprecated
-  public void setTable(String table) {
-    setTableName(table);
-  }
-
   public void setTableName(String table) {
     this.tableName = table;
   }
@@ -357,24 +336,11 @@ public String getTableId() {
     return tableId;
   }
 
-  /**
-   * @see #getInstance(ClientConfiguration)
-   * @deprecated since 1.7.0, use getInstance(ClientConfiguration) instead.
-   */
-  @Deprecated
-  public Instance getInstance() {
-    return getInstance(ClientConfiguration.loadDefault());
-  }
-
   public Instance getInstance(ClientConfiguration base) {
     if (null == instanceName) {
       return null;
     }
 
-    if (isMockInstance()) {
-      return DeprecationUtil.makeMockInstance(getInstanceName());
-    }
-
     if (null == zooKeepers) {
       return null;
     }
@@ -432,22 +398,6 @@ public void setLocations(String[] locations) {
     this.locations = Arrays.copyOf(locations, locations.length);
   }
 
-  /**
-   * @deprecated since 1.8.0; use MiniAccumuloCluster or a standard mock 
framework
-   */
-  @Deprecated
-  public Boolean isMockInstance() {
-    return mockInstance;
-  }
-
-  /**
-   * @deprecated since 1.8.0; use MiniAccumuloCluster or a standard mock 
framework
-   */
-  @Deprecated
-  public void setMockInstance(Boolean mockInstance) {
-    this.mockInstance = mockInstance;
-  }
-
   public Boolean isIsolatedScan() {
     return isolatedScan;
   }
@@ -522,7 +472,6 @@ public String toString() {
     sb.append(" authenticationTokenFile: ").append(tokenFile);
     sb.append(" Authorizations: ").append(auths);
     sb.append(" offlineScan: ").append(offline);
-    sb.append(" mockInstance: ").append(mockInstance);
     sb.append(" isolatedScan: ").append(isolatedScan);
     sb.append(" localIterators: ").append(localIterators);
     sb.append(" fetchColumns: ").append(fetchedColumns);
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplit.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplit.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplit.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplit.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/DelegationTokenStub.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/DelegationTokenStub.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/DelegationTokenStub.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/DelegationTokenStub.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
similarity index 96%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
index 5c5d26e1c7..53fea91717 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
@@ -27,7 +27,6 @@
 import org.apache.accumulo.core.data.ByteSequence;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
 
@@ -42,7 +41,6 @@ public static void updateSplit(RangeInputSplit split, 
Instance instance,
       Authorizations auths, Level logLevel) {
     split.setInstanceName(instance.getInstanceName());
     split.setZooKeepers(instance.getZooKeepers());
-    DeprecationUtil.setMockInstance(split, 
DeprecationUtil.isMockInstance(instance));
 
     split.setPrincipal(principal);
     split.setToken(token);
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
similarity index 93%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
index 90812c9753..7bfd4f9afe 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
@@ -37,7 +37,6 @@
 import org.apache.accumulo.core.client.mapreduce.impl.DelegationTokenStub;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import 
org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -342,32 +341,6 @@ public static void setZooKeeperInstance(Class<?> 
implementingClass, Configuratio
     }
   }
 
-  /**
-   * Configures a {@link org.apache.accumulo.core.client.mock.MockInstance} 
for this job.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property 
configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @param instanceName
-   *          the Accumulo instance name
-   * @since 1.6.0
-   * @deprecated since 1.8.0; use MiniAccumuloCluster or a standard mock 
framework
-   */
-  @Deprecated
-  public static void setMockInstance(Class<?> implementingClass, Configuration 
conf,
-      String instanceName) {
-    String key = enumToConfKey(implementingClass, InstanceOpts.TYPE);
-    if (!conf.get(key, "").isEmpty())
-      throw new IllegalStateException(
-          "Instance info can only be set once per job; it has already been 
configured with "
-              + conf.get(key));
-    conf.set(key, "MockInstance");
-
-    checkArgument(instanceName != null, "instanceName is null");
-    conf.set(enumToConfKey(implementingClass, InstanceOpts.NAME), 
instanceName);
-  }
-
   /**
    * Initializes an Accumulo {@link Instance} based on the configuration.
    *
@@ -381,16 +354,14 @@ public static void setMockInstance(Class<?> 
implementingClass, Configuration con
    */
   public static Instance getInstance(Class<?> implementingClass, Configuration 
conf) {
     String instanceType = conf.get(enumToConfKey(implementingClass, 
InstanceOpts.TYPE), "");
-    if ("MockInstance".equals(instanceType))
-      return DeprecationUtil
-          .makeMockInstance(conf.get(enumToConfKey(implementingClass, 
InstanceOpts.NAME)));
-    else if ("ZooKeeperInstance".equals(instanceType)) {
+    if ("ZooKeeperInstance".equals(instanceType)) {
       return new ZooKeeperInstance(getClientConfiguration(implementingClass, 
conf));
-    } else if (instanceType.isEmpty())
+    } else if (instanceType.isEmpty()) {
       throw new IllegalStateException(
           "Instance has not been configured for " + 
implementingClass.getSimpleName());
-    else
+    } else {
       throw new IllegalStateException("Unrecognized instance type " + 
instanceType);
+    }
   }
 
   /**
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/DistributedCacheHelper.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/DistributedCacheHelper.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/DistributedCacheHelper.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/DistributedCacheHelper.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/FileOutputConfigurator.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/FileOutputConfigurator.java
similarity index 100%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/FileOutputConfigurator.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/FileOutputConfigurator.java
diff --git 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
similarity index 91%
rename from 
core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
rename to 
client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
index edb9634c4f..b3da02a198 100644
--- 
a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
+++ 
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
@@ -50,13 +50,11 @@
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.impl.ClientContext;
 import org.apache.accumulo.core.client.impl.Credentials;
-import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
 import org.apache.accumulo.core.client.impl.Table;
 import org.apache.accumulo.core.client.impl.Tables;
 import org.apache.accumulo.core.client.impl.TabletLocator;
 import org.apache.accumulo.core.client.mapreduce.InputTableConfig;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.PartialKey;
 import org.apache.accumulo.core.data.Range;
@@ -69,7 +67,6 @@
 import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.TablePermission;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.accumulo.core.util.TextUtil;
 import org.apache.hadoop.conf.Configuration;
@@ -727,9 +724,6 @@ public static InputTableConfig getInputTableConfig(Class<?> 
implementingClass, C
    */
   public static TabletLocator getTabletLocator(Class<?> implementingClass, 
Configuration conf,
       Table.ID tableId) throws TableNotFoundException {
-    String instanceType = conf.get(enumToConfKey(implementingClass, 
InstanceOpts.TYPE));
-    if ("MockInstance".equals(instanceType))
-      return DeprecationUtil.makeMockLocator();
     Instance instance = getInstance(implementingClass, conf);
     ClientConfiguration clientConf = getClientConfiguration(implementingClass, 
conf);
     ClientContext context = new ClientContext(instance,
@@ -753,7 +747,7 @@ public static Instance validateInstance(Class<?> 
implementingClass, Configuratio
     if (!isConnectorInfoSet(implementingClass, conf))
       throw new IOException("Input info has not been set.");
     String instanceKey = conf.get(enumToConfKey(implementingClass, 
InstanceOpts.TYPE));
-    if (!"MockInstance".equals(instanceKey) && 
!"ZooKeeperInstance".equals(instanceKey))
+    if (!"ZooKeeperInstance".equals(instanceKey))
       throw new IOException("Instance info has not been set.");
     return getInstance(implementingClass, conf);
   }
@@ -799,70 +793,6 @@ public static void validatePermissions(Class<?> implementingClass, Configuration
     }
   }
 
-  // InputFormat doesn't have the equivalent of OutputFormat's checkOutputSpecs(JobContext job)
-  /**
-   * Check whether a configuration is fully configured to be used with an Accumulo
-   * {@link org.apache.hadoop.mapreduce.InputFormat}.
-   *
-   * <p>
-   * The implementation (JobContext or JobConf which created the Configuration) needs to be used to
-   * extract the proper {@link AuthenticationToken} for {@link DelegationTokenImpl} support.
-   *
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @throws IOException
-   *           if the context is improperly configured
-   * @since 1.6.0
-   *
-   * @see #validateInstance(Class, Configuration)
-   * @see #validatePermissions(Class, Configuration, Connector)
-   */
-  @Deprecated
-  public static void validateOptions(Class<?> implementingClass, Configuration conf)
-      throws IOException {
-
-    Map<String,InputTableConfig> inputTableConfigs = getInputTableConfigs(implementingClass, conf);
-    if (!isConnectorInfoSet(implementingClass, conf))
-      throw new IOException("Input info has not been set.");
-    String instanceKey = conf.get(enumToConfKey(implementingClass, InstanceOpts.TYPE));
-    if (!"MockInstance".equals(instanceKey) && !"ZooKeeperInstance".equals(instanceKey))
-      throw new IOException("Instance info has not been set.");
-    // validate that we can connect as configured
-    try {
-      String principal = getPrincipal(implementingClass, conf);
-      AuthenticationToken token = getAuthenticationToken(implementingClass, conf);
-      Connector c = getInstance(implementingClass, conf).getConnector(principal, token);
-      if (!c.securityOperations().authenticateUser(principal, token))
-        throw new IOException("Unable to authenticate user");
-
-      if (getInputTableConfigs(implementingClass, conf).size() == 0)
-        throw new IOException("No table set.");
-
-      for (Map.Entry<String,InputTableConfig> tableConfig : inputTableConfigs.entrySet()) {
-        if (!c.securityOperations().hasTablePermission(getPrincipal(implementingClass, conf),
-            tableConfig.getKey(), TablePermission.READ))
-          throw new IOException("Unable to access table");
-      }
-      for (Map.Entry<String,InputTableConfig> tableConfigEntry : inputTableConfigs.entrySet()) {
-        InputTableConfig tableConfig = tableConfigEntry.getValue();
-        if (!tableConfig.shouldUseLocalIterators()) {
-          if (tableConfig.getIterators() != null) {
-            for (IteratorSetting iter : tableConfig.getIterators()) {
-              if (!c.tableOperations().testClassLoad(tableConfigEntry.getKey(),
-                  iter.getIteratorClass(), SortedKeyValueIterator.class.getName()))
-                throw new AccumuloException("Servers are unable to load " + iter.getIteratorClass()
-                    + " as a " + SortedKeyValueIterator.class.getName());
-            }
-          }
-        }
-      }
-    } catch (AccumuloException | TableNotFoundException | AccumuloSecurityException e) {
-      throw new IOException(e);
-    }
-  }
-
   /**
   * Returns the {@link org.apache.accumulo.core.client.mapreduce.InputTableConfig} for the
   * configuration based on the properties set using the single-table input methods.
diff --git a/core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOnDefaultTable.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnDefaultTable.java
similarity index 97%
rename from core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOnDefaultTable.java
rename to client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnDefaultTable.java
index d39554cc25..1eb1c303e0 100644
--- a/core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOnDefaultTable.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnDefaultTable.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.accumulo.core.cli;
+package org.apache.accumulo.core.client.mapreduce.lib.impl;
 
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
diff --git a/core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOnRequiredTable.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnRequiredTable.java
similarity index 97%
rename from core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOnRequiredTable.java
rename to client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnRequiredTable.java
index 96ada8808e..ea440a94c5 100644
--- a/core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOnRequiredTable.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnRequiredTable.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.accumulo.core.cli;
+package org.apache.accumulo.core.client.mapreduce.lib.impl;
 
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
diff --git a/core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOpts.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOpts.java
similarity index 97%
rename from core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOpts.java
rename to client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOpts.java
index d4eb23b2f6..6dfc84c5c1 100644
--- a/core/src/main/java/org/apache/accumulo/core/cli/MapReduceClientOpts.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOpts.java
@@ -14,8 +14,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.accumulo.core.cli;
+package org.apache.accumulo.core.client.mapreduce.lib.impl;
 
+import org.apache.accumulo.core.cli.ClientOpts;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/OutputConfigurator.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/OutputConfigurator.java
similarity index 100%
rename from core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/OutputConfigurator.java
rename to client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/OutputConfigurator.java
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/package-info.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/package-info.java
similarity index 100%
rename from core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/package-info.java
rename to client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/package-info.java
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java
similarity index 100%
rename from core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java
rename to client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java
similarity index 100%
rename from core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java
rename to client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
similarity index 95%
rename from core/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
index 7e5bf2eec6..9435277125 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
@@ -33,7 +33,6 @@
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
 import org.apache.accumulo.core.iterators.user.WholeRowIterator;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
@@ -89,7 +88,6 @@ public void testAllFieldsWritable() throws IOException {
     split.setToken(new PasswordToken("password"));
     split.setPrincipal("root");
     split.setInstanceName("instance");
-    DeprecationUtil.setMockInstance(split, true);
     split.setZooKeepers("localhost");
     split.setIterators(iterators);
     split.setLogLevel(Level.WARN);
@@ -115,8 +113,6 @@ public void testAllFieldsWritable() throws IOException {
     Assert.assertEquals(split.getToken(), newSplit.getToken());
     Assert.assertEquals(split.getPrincipal(), newSplit.getPrincipal());
     Assert.assertEquals(split.getInstanceName(), newSplit.getInstanceName());
-    Assert.assertEquals(DeprecationUtil.isMockInstanceSet(split),
-        DeprecationUtil.isMockInstanceSet(newSplit));
     Assert.assertEquals(split.getZooKeepers(), newSplit.getZooKeepers());
     Assert.assertEquals(split.getIterators(), newSplit.getIterators());
     Assert.assertEquals(split.getLogLevel(), newSplit.getLogLevel());
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloInputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormatTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/InputTableConfigTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/InputTableConfigTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/InputTableConfigTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/InputTableConfigTest.java
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
similarity index 95%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
index 7e0aaecbb9..0f2133a515 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
@@ -33,7 +33,6 @@
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
 import org.apache.accumulo.core.iterators.user.WholeRowIterator;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
@@ -92,7 +91,6 @@ public void testAllFieldsWritable() throws IOException {
     split.setToken(new PasswordToken("password"));
     split.setPrincipal("root");
     split.setInstanceName("instance");
-    DeprecationUtil.setMockInstance(split, true);
     split.setZooKeepers("localhost");
     split.setIterators(iterators);
     split.setLogLevel(Level.WARN);
@@ -119,8 +117,6 @@ public void testAllFieldsWritable() throws IOException {
     Assert.assertEquals(split.getToken(), newSplit.getToken());
     Assert.assertEquals(split.getPrincipal(), newSplit.getPrincipal());
     Assert.assertEquals(split.getInstanceName(), newSplit.getInstanceName());
-    Assert.assertEquals(DeprecationUtil.isMockInstanceSet(split),
-        DeprecationUtil.isMockInstanceSet(newSplit));
     Assert.assertEquals(split.getZooKeepers(), newSplit.getZooKeepers());
     Assert.assertEquals(split.getIterators(), newSplit.getIterators());
     Assert.assertEquals(split.getLogLevel(), newSplit.getLogLevel());
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
similarity index 95%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
index 50b4966f66..a9fd63c1ab 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
@@ -34,7 +34,6 @@
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
 import org.apache.accumulo.core.iterators.user.WholeRowIterator;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.DeprecationUtil;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
@@ -91,7 +90,6 @@ public void testAllFieldsWritable() throws IOException {
     split.setFetchedColumns(fetchedColumns);
     split.setToken(new PasswordToken("password"));
     split.setPrincipal("root");
-    DeprecationUtil.setMockInstance(split, true);
     split.setInstanceName("instance");
     split.setZooKeepers("localhost");
     split.setIterators(iterators);
@@ -116,8 +114,6 @@ public void testAllFieldsWritable() throws IOException {
     Assert.assertEquals(split.getToken(), newSplit.getToken());
     Assert.assertEquals(split.getPrincipal(), newSplit.getPrincipal());
     Assert.assertEquals(split.getInstanceName(), newSplit.getInstanceName());
-    Assert.assertEquals(DeprecationUtil.isMockInstanceSet(split),
-        DeprecationUtil.isMockInstanceSet(newSplit));
     Assert.assertEquals(split.getZooKeepers(), newSplit.getZooKeepers());
     Assert.assertEquals(split.getIterators(), newSplit.getIterators());
     Assert.assertEquals(split.getLogLevel(), newSplit.getLogLevel());
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
similarity index 88%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
index 90bc8c4774..9cdd950002 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
@@ -26,7 +26,6 @@
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
@@ -109,22 +108,6 @@ public void testSetZooKeeperInstance() {
     // assertEquals(1234000, ((ZooKeeperInstance) instance).getZooKeepersSessionTimeOut());
   }
 
-  @SuppressWarnings("deprecation")
-  @Test
-  public void testSetMockInstance() {
-    Class<?> mockClass = org.apache.accumulo.core.client.mock.MockInstance.class;
-    Configuration conf = new Configuration();
-    ConfiguratorBase.setMockInstance(this.getClass(), conf, "testInstanceName");
-    assertEquals("testInstanceName", conf
-        .get(ConfiguratorBase.enumToConfKey(this.getClass(), ConfiguratorBase.InstanceOpts.NAME)));
-    assertEquals(null, conf.get(ConfiguratorBase.enumToConfKey(this.getClass(),
-        ConfiguratorBase.InstanceOpts.ZOO_KEEPERS)));
-    assertEquals(mockClass.getSimpleName(), conf
-        .get(ConfiguratorBase.enumToConfKey(this.getClass(), ConfiguratorBase.InstanceOpts.TYPE)));
-    Instance instance = ConfiguratorBase.getInstance(this.getClass(), conf);
-    assertEquals(mockClass.getName(), instance.getClass().getName());
-  }
-
   @Test
   public void testSetLogLevel() {
     Configuration conf = new Configuration();
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitionerTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitionerTest.java
similarity index 100%
rename from core/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitionerTest.java
rename to client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitionerTest.java
diff --git a/core/pom.xml b/core/pom.xml
index 1643cd66d8..15bdb4f132 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -175,7 +175,6 @@
                 <include>org[.]apache[.]accumulo[.]core[.]iterators[.]IteratorUtil[$]IteratorScope</include>
               </includes>
               <excludes>
-                <exclude>.*Impl</exclude>
                 <exclude>.*[.]impl[.].*</exclude>
                 <exclude>.*[.]thrift[.].*</exclude>
                 <exclude>org[.]apache[.]accumulo[.]core[.]security[.]crypto[.].*</exclude>
@@ -185,23 +184,16 @@
                 <!--Allow API data types to reference thrift types, but do not
                     analyze thrift types -->
                 <allow>org[.]apache[.]accumulo[.].*[.]thrift[.].*</allow>
-                <!--Type from hadoop used in API.  If adding a new type from
+                <!--Type from hadoop used in API. If adding a new type from
                     Hadoop to the Accumulo API ensure its annotated as
                     stable.-->
                 <allow>org[.]apache[.]hadoop[.]conf[.]Configuration</allow>
                 <allow>org[.]apache[.]hadoop[.]fs[.](FileSystem|Path)</allow>
                 <allow>org[.]apache[.]hadoop[.]io[.](Text|Writable|WritableComparable|WritableComparator)</allow>
-                <allow>org[.]apache[.]hadoop[.]mapred[.](JobConf|RecordReader|InputSplit|RecordWriter|Reporter)</allow>
-                <allow>org[.]apache[.]hadoop[.]mapred[.]FileOutputFormat[$]Counter</allow>
-                <allow>org[.]apache[.]hadoop[.]mapreduce[.](Job|JobContext|RecordReader|InputSplit|TaskAttemptContext|RecordWriter|OutputCommitter|TaskInputOutputContext)</allow>
-                <allow>org[.]apache[.]hadoop[.]mapreduce[.]lib[.]output[.]FileOutputFormat[$]Counter</allow>
-                <allow>org[.]apache[.]hadoop[.]util[.]Progressable</allow>
                 <!--ugghhh-->
                 <allow>org[.]apache[.]log4j[.](Level|Logger)</allow>
                 <!-- allow javax security exceptions for Authentication tokens -->
                 <allow>javax[.]security[.]auth[.]DestroyFailedException</allow>
-                <!-- allow questionable Hadoop exceptions for mapreduce -->
-                <allow>org[.]apache[.]hadoop[.]mapred[.](FileAlreadyExistsException|InvalidJobConfException)</allow>
                 <!-- allow lexicoders to throw iterator exceptions -->
                 <allow>org[.]apache[.]accumulo[.]core[.]iterators[.]ValueFormatException</allow>
               </allows>
diff --git a/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java b/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java
index d86e6afebe..6648acbb35 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperations.java
@@ -32,7 +32,6 @@
 import org.apache.accumulo.core.client.TableExistsException;
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.TableOfflineException;
-import org.apache.accumulo.core.client.mapreduce.AccumuloFileOutputFormat;
 import org.apache.accumulo.core.client.rfile.RFile;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
 import org.apache.accumulo.core.client.summary.Summarizer;
@@ -595,7 +594,7 @@ void setLocalityGroups(String tableName, Map<String,Set<Text>> groups)
 
   /**
   * Bulk import all the files in a directory into a table. Files can be created using
-   * {@link AccumuloFileOutputFormat} and {@link RFile#newWriter()}
+   * {@code AccumuloFileOutputFormat} and {@link RFile#newWriter()}
    *
    * @param tableName
    *          the name of the table
diff --git a/core/src/main/java/org/apache/accumulo/core/util/DeprecationUtil.java b/core/src/main/java/org/apache/accumulo/core/util/DeprecationUtil.java
index 727d1231fa..045da7c614 100644
--- a/core/src/main/java/org/apache/accumulo/core/util/DeprecationUtil.java
+++ b/core/src/main/java/org/apache/accumulo/core/util/DeprecationUtil.java
@@ -17,8 +17,6 @@
 package org.apache.accumulo.core.util;
 
 import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.impl.TabletLocator;
-import org.apache.accumulo.core.client.mapreduce.RangeInputSplit;
 
 /**
 * A utility class for managing deprecated items. This avoids scattering private helper methods all
@@ -46,19 +44,4 @@ public static Instance makeMockInstance(String instance) {
     return new org.apache.accumulo.core.client.mock.MockInstance(instance);
   }
 
-  @SuppressWarnings("deprecation")
-  public static void setMockInstance(RangeInputSplit split, boolean isMockInstance) {
-    split.setMockInstance(isMockInstance);
-  }
-
-  @SuppressWarnings("deprecation")
-  public static boolean isMockInstanceSet(RangeInputSplit split) {
-    return split.isMockInstance();
-  }
-
-  @SuppressWarnings("deprecation")
-  public static TabletLocator makeMockLocator() {
-    return new org.apache.accumulo.core.client.mock.impl.MockTabletLocator();
-  }
-
 }
diff --git a/minicluster/pom.xml b/minicluster/pom.xml
index 4d4aa826fd..5fa8e319c6 100644
--- a/minicluster/pom.xml
+++ b/minicluster/pom.xml
@@ -145,11 +145,10 @@
                 <include>org[.]apache[.]accumulo[.]minicluster[.].*</include>
               </includes>
               <excludes>
-                <exclude>.*Impl</exclude>
                 <exclude>.*[.]impl[.].*</exclude>
               </excludes>
               <allows>
-                <allow>org[.]apache[.]accumulo[.]core[.](client|data|security)[.](?!.*(impl|thrift|crypto).*).*</allow>
+                <allow>org[.]apache[.]accumulo[.]core[.](?:client|data|security)[.](?!.*(impl|thrift|crypto).*).*</allow>
               </allows>
             </configuration>
           </execution>
diff --git a/pom.xml b/pom.xml
index 904e2b61f4..0c4a8700ed 100644
--- a/pom.xml
+++ b/pom.xml
@@ -78,6 +78,7 @@
   </mailingLists>
   <modules>
     <module>assemble</module>
+    <module>client/mapreduce</module>
     <module>core</module>
     <module>fate</module>
     <module>iterator-test-harness</module>
@@ -309,6 +310,11 @@
         <artifactId>log4j</artifactId>
         <version>1.2.17</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.accumulo</groupId>
+        <artifactId>accumulo-client-mapreduce</artifactId>
+        <version>${project.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.accumulo</groupId>
         <artifactId>accumulo-core</artifactId>
diff --git a/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/MajorCompactionRequest.java b/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/MajorCompactionRequest.java
index 8876cacacd..6a7ffcbe0f 100644
--- a/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/MajorCompactionRequest.java
+++ b/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/MajorCompactionRequest.java
@@ -24,7 +24,6 @@
 import java.util.function.Predicate;
 
 import org.apache.accumulo.core.client.admin.TableOperations;
-import org.apache.accumulo.core.client.mapred.AccumuloFileOutputFormat;
 import org.apache.accumulo.core.client.rfile.RFile.WriterOptions;
 import org.apache.accumulo.core.client.summary.Summarizer;
 import org.apache.accumulo.core.client.summary.Summarizer.Combiner;
@@ -139,8 +138,6 @@ public MajorCompactionReason getReason() {
    *
    * @see Summarizer
    * @see TableOperations#addSummarizers(String, SummarizerConfiguration...)
-   * @see AccumuloFileOutputFormat#setSummarizers(org.apache.hadoop.mapred.JobConf,
-   *      SummarizerConfiguration...)
    * @see WriterOptions#withSummarizers(SummarizerConfiguration...)
    */
   public List<Summary> getSummaries(Collection<FileRef> files,
diff --git a/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/strategies/TooManyDeletesCompactionStrategy.java b/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/strategies/TooManyDeletesCompactionStrategy.java
index be2d1abf5e..504c0bd5c9 100644
--- a/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/strategies/TooManyDeletesCompactionStrategy.java
+++ b/server/tserver/src/main/java/org/apache/accumulo/tserver/compaction/strategies/TooManyDeletesCompactionStrategy.java
@@ -27,7 +27,6 @@
 import java.util.Map.Entry;
 import java.util.function.Predicate;
 
-import org.apache.accumulo.core.client.mapred.AccumuloFileOutputFormat;
 import org.apache.accumulo.core.client.rfile.RFile.WriterOptions;
 import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
 import org.apache.accumulo.core.client.summary.Summary;
@@ -65,8 +64,8 @@
  *
  * <p>
  * Bulk files can be generated with summary information by calling
- * {@link AccumuloFileOutputFormat#setSummarizers(org.apache.hadoop.mapred.JobConf, SummarizerConfiguration...)}
- * or {@link WriterOptions#withSummarizers(SummarizerConfiguration...)}
+ * {@code AccumuloFileOutputFormat#setSummarizers(JobConf, SummarizerConfiguration...)} or
+ * {@link WriterOptions#withSummarizers(SummarizerConfiguration...)}
  *
  * <p>
 * When this strategy does not decide to compact based on the number of deletes, then it will defer
@@ -105,6 +104,7 @@
 
   public static final String PROCEED_ZERO_NO_SUMMARY_OPT_DEFAULT = "false";
 
+  @Override
   public void init(Map<String,String> options) {
     this.threshold = Double.parseDouble(options.getOrDefault(THRESHOLD_OPT, THRESHOLD_OPT_DEFAULT));
     if (threshold <= 0.0 || threshold > 1.0) {
diff --git a/test/pom.xml b/test/pom.xml
index 1c6dbbe40f..82f2c09168 100644
--- a/test/pom.xml
+++ b/test/pom.xml
@@ -78,6 +78,10 @@
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.accumulo</groupId>
+      <artifactId>accumulo-client-mapreduce</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.accumulo</groupId>
       <artifactId>accumulo-core</artifactId>
diff --git a/test/src/main/java/org/apache/accumulo/test/mapreduce/RowHash.java b/test/src/main/java/org/apache/accumulo/test/mapreduce/RowHash.java
index f99619f3d0..93385614a1 100644
--- a/test/src/main/java/org/apache/accumulo/test/mapreduce/RowHash.java
+++ b/test/src/main/java/org/apache/accumulo/test/mapreduce/RowHash.java
@@ -20,9 +20,9 @@
 import java.util.Base64;
 import java.util.Collections;
 
-import org.apache.accumulo.core.cli.MapReduceClientOnRequiredTable;
 import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.core.client.mapreduce.lib.impl.MapReduceClientOnRequiredTable;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
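
For reference, the patch above moves the MapReduce client code into the new org.apache.accumulo:accumulo-client-mapreduce artifact and relocates the CLI helpers from org.apache.accumulo.core.cli to org.apache.accumulo.core.client.mapreduce.lib.impl. The following is a minimal sketch, not part of the patch, of how a downstream job might be wired up after this change; it follows the same pattern as the RowHash.java hunk above. The ExampleJob class name is hypothetical, and a build using it would be assumed to declare a dependency on accumulo-client-mapreduce.

import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
// Relocated by this PR: formerly org.apache.accumulo.core.cli.MapReduceClientOnRequiredTable
import org.apache.accumulo.core.client.mapreduce.lib.impl.MapReduceClientOnRequiredTable;
import org.apache.hadoop.mapreduce.Job;

public class ExampleJob {
  public static void main(String[] args) throws Exception {
    // Parse the usual client options (instance, principal, token, table) from the command line.
    MapReduceClientOnRequiredTable opts = new MapReduceClientOnRequiredTable();
    opts.parseArgs(ExampleJob.class.getName(), args);

    Job job = Job.getInstance();
    job.setJobName(ExampleJob.class.getName());
    job.setJarByClass(ExampleJob.class);
    job.setInputFormatClass(AccumuloInputFormat.class);
    job.setOutputFormatClass(AccumuloOutputFormat.class);
    // Pushes the connector info, instance, and table name onto the job configuration.
    opts.setAccumuloConfigs(job);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}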


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
