This is an automated email from the ASF dual-hosted git repository.

volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit f3e32c359702a6b27ca9963008d44f894f6bbb54
Author: Vitalii Diravka <[email protected]>
AuthorDate: Tue Sep 4 20:02:43 2018 +0300

    DRILL-6540: Upgrade to HADOOP-3.0.3 libraries
    - accommodate apache and mapr profiles with hadoop 3.0 libraries
    - update HBase version
    - fix jdbc-all woodstox dependency
    - unban Apache commons-logging dependency
---
 drill-yarn/pom.xml                                 |   6 +
 exec/java-exec/pom.xml                             |  71 ++++++++
 .../impl/scan/file/FileMetadataManager.java        |   8 +-
 .../drill/exec/store/LocalSyncableFileSystem.java  |   4 +-
 .../impersonation/TestImpersonationMetadata.java   |  15 +-
 .../exec/physical/unit/TestOutputBatchSize.java    |   5 +-
 .../org/apache/drill/exec/work/batch/FileTest.java |   4 +-
 exec/jdbc-all/pom.xml                              |   5 +-
 .../org/apache/drill/jdbc/DrillbitClassLoader.java |  21 +--
 .../org/apache/drill/jdbc/ITTestShadedJar.java     |   7 +-
 pom.xml                                            | 190 ++++++++++++---------
 11 files changed, 222 insertions(+), 114 deletions(-)

diff --git a/drill-yarn/pom.xml b/drill-yarn/pom.xml
index 9de6d47..57d2073 100644
--- a/drill-yarn/pom.xml
+++ b/drill-yarn/pom.xml
@@ -83,6 +83,12 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-client</artifactId>
       <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <artifactId>slf4j-log4j12</artifactId>
+          <groupId>org.slf4j</groupId>
+        </exclusion>
+      </exclusions>
     </dependency>

     <!-- For ZK monitoring -->
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 38a872b..176b388 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -396,6 +396,26 @@
           <groupId>io.netty</groupId>
           <artifactId>netty-all</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-servlet</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-servlets</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-security</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -434,6 +454,57 @@
           <groupId>commons-codec</groupId>
           <artifactId>commons-codec</artifactId>
         </exclusion>
+<!---->
+        <!--<exclusion>-->
+          <!--<groupId>com.sun.jersey</groupId>-->
+          <!--<artifactId>jersey-core</artifactId>-->
+        <!--</exclusion>-->
+        <!--<exclusion>-->
+          <!--<groupId>com.sun.jersey</groupId>-->
+          <!--<artifactId>jersey-server</artifactId>-->
+        <!--</exclusion>-->
+        <!--<exclusion>-->
+          <!--<groupId>com.sun.jersey</groupId>-->
+          <!--<artifactId>jersey-json</artifactId>-->
+        <!--</exclusion>-->
+<!---->
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty-all</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>commons-codec</groupId>
+          <artifactId>commons-codec</artifactId>
+        </exclusion>
+        <!---->
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <!---->
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
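Note on the logging exclusions above: Hadoop 3.0.x still pulls in log4j 1.x and the
slf4j-log4j12 binding, which would clash with Drill's Logback binding if not excluded.
A quick way to verify that Logback remains the only SLF4J binding on the classpath
(an illustrative snippet, not part of this commit):

    import org.slf4j.LoggerFactory;

    public class SlfBindingCheck {
      public static void main(String[] args) {
        // Prints ch.qos.logback.classic.LoggerContext when Logback is bound;
        // org.slf4j.impl.Log4jLoggerFactory would mean slf4j-log4j12 leaked in.
        System.out.println(LoggerFactory.getILoggerFactory().getClass().getName());
      }
    }
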
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataManager.java
index c8bb5ed..330a2ab 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataManager.java
@@ -21,7 +21,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

-import org.apache.directory.api.util.Strings;
 import org.apache.drill.common.map.CaseInsensitiveMap;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.physical.impl.scan.project.ColumnProjection;
@@ -37,6 +36,7 @@ import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.ColumnExplorer.ImplicitFileColumns;
 import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.shaded.guava.com.google.common.base.Strings;
 import org.apache.hadoop.fs.Path;

 import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
@@ -58,7 +58,7 @@ import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTes
  * On each file (on each reader), the columns are "resolved." Here, that means
  * that the columns are filled in with actual values based on the present file.
  * <p>
- * This is the successor to {@link ColumnExplorer}.
+ * This is the successor to {@link org.apache.drill.exec.store.ColumnExplorer}.
  */
 public class FileMetadataManager implements MetadataManager, ReaderProjectionResolver, VectorSource {
@@ -167,8 +167,6 @@ public class FileMetadataManager implements MetadataManager, ReaderProjectionRes
    * one file, rather than a directory
    * @param files the set of files to scan. Used to compute the maximum partition
    * depth across all readers in this fragment
-   *
-   * @return this builder
    */
   public FileMetadataManager(OptionSet optionManager,
@@ -178,7 +176,7 @@ public class FileMetadataManager implements MetadataManager, ReaderProjectionRes
     partitionDesignator = optionManager.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
     for (ImplicitFileColumns e : ImplicitFileColumns.values()) {
       String colName = optionManager.getString(e.optionName());
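Note: the import swap above replaces org.apache.directory.api.util.Strings.isEmpty()
with Guava's Strings.isNullOrEmpty(), which has the same null-and-empty semantics
for this check. An illustrative snippet (not part of this commit):

    import org.apache.drill.shaded.guava.com.google.common.base.Strings;

    public class StringsCheck {
      public static void main(String[] args) {
        System.out.println(Strings.isNullOrEmpty(null));   // true
        System.out.println(Strings.isNullOrEmpty(""));     // true
        System.out.println(Strings.isNullOrEmpty("fqn"));  // false
      }
    }
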
-      if (!Strings.isEmpty(colName)) {
+      if (!Strings.isNullOrEmpty(colName)) {
         FileMetadataColumnDefn defn = new FileMetadataColumnDefn(colName, e);
         implicitColDefns.add(defn);
         fileMetadataColIndex.put(defn.colName, defn);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java
index 9363954..21d66f0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java
@@ -65,7 +65,7 @@ public class LocalSyncableFileSystem extends FileSystem {

   @Override
   public FSDataOutputStream create(Path path, FsPermission fsPermission, boolean b, int i, short i2, long l, Progressable progressable) throws IOException {
-    return new FSDataOutputStream(new LocalSyncableOutputStream(path));
+    return new FSDataOutputStream(new LocalSyncableOutputStream(path), new Statistics(path.toUri().getScheme()));
   }

   @Override
@@ -141,7 +141,7 @@ public class LocalSyncableFileSystem extends FileSystem {
       output = new BufferedOutputStream(fos, 64*1024);
     }

-    @Override
+    // TODO: remove it after upgrade MapR profile onto hadoop.version 3.1
     public void sync() throws IOException {
       output.flush();
       fos.getFD().sync();
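Note: sync() loses its @Override above because Hadoop 3 removed the long-deprecated
Syncable.sync(); hflush() and hsync() are its replacements, which is also why the
FileTest changes below switch to hsync(). A minimal sketch of the Hadoop 3 API
(illustrative only; the path is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class SyncDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        try (FSDataOutputStream out = fs.create(new Path("/tmp/sync-demo"))) {
          out.write("hello".getBytes());
          out.hflush(); // flush out of client buffers to the filesystem
          out.hsync();  // additionally sync to durable storage (old sync() behavior)
        }
      }
    }
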
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
index 2c6e4ee..0e9c0e0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
@@ -268,17 +268,20 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {
   @Test
   public void testCreateViewInWSWithNoPermissionsForQueryUser() throws Exception {
     // Workspace dir owned by "processUser", workspace group is "group0" and "user2" is not part of "group0"
-    final String viewSchema = MINI_DFS_STORAGE_PLUGIN_NAME + ".drill_test_grp_0_755";
+    final String tableWS = "drill_test_grp_0_755";
+    final String viewSchema = MINI_DFS_STORAGE_PLUGIN_NAME + "." + tableWS;
     final String viewName = "view1";

     updateClient(user2);
     test("USE " + viewSchema);

-    final String query = "CREATE VIEW " + viewName + " AS SELECT " +
-        "c_custkey, c_nationkey FROM cp.`tpch/customer.parquet` ORDER BY c_custkey;";
-    final String expErrorMsg = "PERMISSION ERROR: Permission denied: user=drillTestUser2, access=WRITE, inode=\"/drill_test_grp_0_755";
-    errorMsgTestHelper(query, expErrorMsg);
+    String expErrorMsg = "PERMISSION ERROR: Permission denied: user=drillTestUser2, access=WRITE, inode=\"/" + tableWS;
+    thrown.expect(UserRemoteException.class);
+    thrown.expectMessage(containsString(expErrorMsg));
+
+    test("CREATE VIEW %s AS" +
+        " SELECT c_custkey, c_nationkey FROM cp.`tpch/customer.parquet` ORDER BY c_custkey", viewName);

     // SHOW TABLES is expected to return no records as view creation fails above.
     testBuilder()
@@ -348,7 +351,7 @@ public class TestImpersonationMetadata extends BaseTestImpersonation {

     thrown.expect(UserRemoteException.class);
     thrown.expectMessage(containsString("Permission denied: user=drillTestUser2, " +
-        "access=WRITE, inode=\"/drill_test_grp_0_755"));
+        "access=WRITE, inode=\"/" + tableWS));

     test("CREATE TABLE %s AS SELECT c_custkey, c_nationkey " +
         "FROM cp.`tpch/customer.parquet` ORDER BY c_custkey", tableName);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java
index ce1b8c9..97ffb21 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java
@@ -20,7 +20,6 @@ package org.apache.drill.exec.physical.unit;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.calcite.rel.core.JoinRelType;
-import org.apache.directory.api.util.Strings;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.expression.LogicalExpression;
@@ -328,7 +327,7 @@ public class TestOutputBatchSize extends PhysicalOpUnitTestBase {
         expr[i * 2] = "lower(" + baselineColumns[i] + ")";
         expr[i * 2 + 1] = baselineColumns[i];
       }
-      baselineValues[i] = (transfer ? testString : Strings.lowerCase(testString));
+      baselineValues[i] = (transfer ? testString : testString.toLowerCase());
     }
     jsonRow.append("}");
     StringBuilder batchString = new StringBuilder("[");
@@ -385,7 +384,7 @@ public class TestOutputBatchSize extends PhysicalOpUnitTestBase {
       expr[i * 2] = "lower(" + baselineColumns[i] + ")";
       expr[i * 2 + 1] = baselineColumns[i];

-      baselineValues[i] = Strings.lowerCase(testString);
+      baselineValues[i] = testString.toLowerCase();
     }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/FileTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/FileTest.java
index 2799838..04e59f6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/FileTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/FileTest.java
@@ -43,7 +43,7 @@ public class FileTest {
     FSDataOutputStream out = fs.create(path);
     byte[] s = "hello world".getBytes();
     out.write(s);
-    out.sync();
+    out.hsync();
     FSDataInputStream in = fs.open(path);
     byte[] bytes = new byte[s.length];
     in.read(bytes);
@@ -60,7 +60,7 @@ public class FileTest {
     bytes = new byte[256*1024];
     Stopwatch watch = Stopwatch.createStarted();
     out.write(bytes);
-    out.sync();
+    out.hsync();
     long t = watch.elapsed(TimeUnit.MILLISECONDS);
     logger.info(String.format("Elapsed: %d. Rate %d.\n", t, (long) ((long) bytes.length * 1000L / t)));
   }
diff --git a/exec/jdbc-all/pom.xml b/exec/jdbc-all/pom.xml
index 13234fd..d523606 100644
--- a/exec/jdbc-all/pom.xml
+++ b/exec/jdbc-all/pom.xml
@@ -249,6 +249,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-failsafe-plugin</artifactId>
+        <version>3.0.0-M3</version>
         <executions>
           <execution>
             <goals>
@@ -341,6 +342,7 @@
               <exclude>commons-beanutils:commons-beanutils-core:jar:*</exclude>
               <exclude>commons-beanutils:commons-beanutils:jar:*</exclude>
               <exclude>io.netty:netty-tcnative:jar:*</exclude>
+              <exclude>com.fasterxml.woodstox:woodstox-core:jar:*</exclude>
             </excludes>
           </artifactSet>
           <relocations>
@@ -403,6 +405,7 @@
             <relocation><pattern>org.apache.xpath.</pattern><shadedPattern>oadd.org.apache.xpath.</shadedPattern></relocation>
             <relocation><pattern>org.apache.zookeeper.</pattern><shadedPattern>oadd.org.apache.zookeeper.</shadedPattern></relocation>
             <relocation><pattern>org.apache.hadoop.</pattern><shadedPattern>oadd.org.apache.hadoop.</shadedPattern></relocation>
+            <relocation><pattern>com.fasterxml.woodstox.</pattern><shadedPattern>oadd.com.fasterxml.woodstox.</shadedPattern></relocation>
           </relocations>
           <transformers>
             <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
@@ -528,7 +531,7 @@
                     This is likely due to you adding new dependencies to a java-exec and not updating the excludes in this module. This is important as it minimizes the size of the dependency of Drill application users.
                   </message>
-                  <maxsize>41000000</maxsize>
+                  <maxsize>42600000</maxsize>
                   <minsize>15000000</minsize>
                   <files>
                     <file>${project.build.directory}/drill-jdbc-all-${project.version}.jar</file>
diff --git a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/DrillbitClassLoader.java b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/DrillbitClassLoader.java
index bc31f99..eaedf56 100644
--- a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/DrillbitClassLoader.java
+++ b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/DrillbitClassLoader.java
@@ -26,16 +26,16 @@ import java.util.List;

 public class DrillbitClassLoader extends URLClassLoader {

-  public DrillbitClassLoader() {
+  DrillbitClassLoader() {
     super(URLS);
   }

   private static final URL[] URLS;
   static {
-    ArrayList<URL> urlList = new ArrayList<URL>();
+    ArrayList<URL> urlList = new ArrayList<>();
     final String classPath = System.getProperty("app.class.path");
-    final String[] st = fracture(classPath, File.pathSeparator);
+    final String[] st = fracture(classPath);
     final int l = st.length;
     for (int i = 0; i < l; i++) {
       try {
@@ -49,10 +49,7 @@ public class DrillbitClassLoader extends URLClassLoader {
     }
     urlList.toArray(new URL[urlList.size()]);

-    List<URL> urls = new ArrayList<>();
-    for (URL url : urlList) {
-      urls.add(url);
-    }
+    List<URL> urls = new ArrayList<>(urlList);
     URLS = urls.toArray(new URL[urls.size()]);
   }

@@ -61,21 +58,21 @@ public class DrillbitClassLoader extends URLClassLoader {
    *
    * Taken from Apache Harmony
    */
-  private static String[] fracture(String str, String sep) {
+  private static String[] fracture(String str) {
     if (str.length() == 0) {
       return new String[0];
     }
-    ArrayList<String> res = new ArrayList<String>();
+    ArrayList<String> res = new ArrayList<>();
     int in = 0;
     int curPos = 0;
-    int i = str.indexOf(sep);
-    int len = sep.length();
+    int i = str.indexOf(File.pathSeparator);
+    int len = File.pathSeparator.length();
     while (i != -1) {
       String s = str.substring(curPos, i);
       res.add(s);
       in++;
       curPos = i + len;
-      i = str.indexOf(sep, curPos);
+      i = str.indexOf(File.pathSeparator, curPos);
     }

     len = str.length();
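Note: fracture() now hard-codes File.pathSeparator instead of taking a separator
argument. For reference, the JDK alone can do the same splitting; an alternative
sketch (not the committed code):

    import java.io.File;
    import java.net.URL;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.regex.Pattern;

    public class ClasspathUrls {
      static URL[] urls(String classPath) throws Exception {
        List<URL> result = new ArrayList<>();
        // Split on the platform path separator (':' on Unix, ';' on Windows).
        for (String entry : classPath.split(Pattern.quote(File.pathSeparator))) {
          result.add(new File(entry).toURI().toURL());
        }
        return result.toArray(new URL[0]);
      }
    }
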
diff --git a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
index 99f399d..19a4be8 100644
--- a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
+++ b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
@@ -105,6 +105,7 @@ public class ITTestShadedJar extends BaseTest {
       super.failed(e, description);
       done();
       runMethod("failed", description);
+      logger.error("Check whether this test was running within 'integration-test' Maven phase");
     }

     private void done() {
@@ -235,8 +236,8 @@ public class ITTestShadedJar extends BaseTest {

   private static void runWithLoader(String name, ClassLoader loader) throws Exception {
     Class<?> clazz = loader.loadClass(ITTestShadedJar.class.getName() + "$" + name);
-    Object o = clazz.getDeclaredConstructors()[0].newInstance(loader);
-    clazz.getMethod("go").invoke(o);
+    Object instance = clazz.getDeclaredConstructors()[0].newInstance(loader);
+    clazz.getMethod("go").invoke(instance);
   }

   public abstract static class AbstractLoaderThread extends Thread {
@@ -283,7 +284,7 @@ public class ITTestShadedJar extends BaseTest {
       // loader.loadClass("org.apache.drill.exec.exception.SchemaChangeException");

       // execute a single query to make sure the drillbit is fully up
-      clazz.getMethod("testNoResult", String.class, new Object[] {}.getClass())
+      clazz.getMethod("testNoResult", String.class, Object[].class)
           .invoke(null, "select * from (VALUES 1)", new Object[] {});

       SEM.release();
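Note: Object[].class above is the array class literal; the replaced
new Object[] {}.getClass() allocated a throwaway array to obtain the same
Class object. An illustrative check:

    public class ArrayClassLiteral {
      public static void main(String[] args) {
        // Both expressions name the exact same Class instance.
        System.out.println(Object[].class == new Object[] {}.getClass()); // true
      }
    }
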
diff --git a/pom.xml b/pom.xml
index c551747..e012ec2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -82,8 +82,8 @@
       Apache Hive 2.3.2. If the version is changed, make sure the jars and their dependencies are updated.
     -->
     <hive.version>2.3.2</hive.version>
-    <hadoop.version>2.7.4</hadoop.version>
-    <hbase.version>2.1.1</hbase.version>
+    <hadoop.version>3.0.3</hadoop.version>
+    <hbase.version>2.1.4</hbase.version>
     <fmpp.version>1.0</fmpp.version>
     <freemarker.version>2.3.28</freemarker.version>
     <javassist.version>3.25.0-GA</javassist.version>
@@ -511,7 +511,7 @@
             <rules>
               <bannedDependencies>
                 <excludes>
-                  <exclude>commons-logging</exclude>
+                  <!--<exclude>commons-logging</exclude>-->
                   <exclude>javax.servlet:servlet-api</exclude>
                   <exclude>org.mortbay.jetty:servlet-api</exclude>
                   <exclude>org.mortbay.jetty:servlet-api-2.5</exclude>
@@ -1006,10 +1006,17 @@
     <dependency>
       <groupId>ch.qos.logback</groupId>
       <artifactId>logback-classic</artifactId>
-      <version>1.0.13</version>
+      <version>${logback.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-core</artifactId>
+      <version>${logback.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
       <groupId>de.huxhorn.lilith</groupId>
       <artifactId>de.huxhorn.lilith.logback.appender.multiplex-classic</artifactId>
       <version>0.9.44</version>
@@ -1055,6 +1062,26 @@
   <dependencyManagement>
     <dependencies>
       <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-api</artifactId>
+        <version>${dep.slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>jul-to-slf4j</artifactId>
+        <version>${dep.slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>jcl-over-slf4j</artifactId>
+        <version>${dep.slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>log4j-over-slf4j</artifactId>
+        <version>${dep.slf4j.version}</version>
+      </dependency>
+      <dependency>
         <groupId>${calcite.groupId}</groupId>
         <artifactId>calcite-core</artifactId>
         <version>${calcite.version}</version>
@@ -1886,14 +1913,14 @@
           <artifactId>mockito-all</artifactId>
           <groupId>org.mockito</groupId>
         </exclusion>
-        <exclusion>
-          <artifactId>commons-logging-api</artifactId>
-          <groupId>commons-logging</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-logging</artifactId>
-          <groupId>commons-logging</groupId>
-        </exclusion>
+        <!--<exclusion>-->
+          <!--<artifactId>commons-logging-api</artifactId>-->
+          <!--<groupId>commons-logging</groupId>-->
+        <!--</exclusion>-->
+        <!--<exclusion>-->
+          <!--<artifactId>commons-logging</artifactId>-->
+          <!--<groupId>commons-logging</groupId>-->
+        <!--</exclusion>-->
         <exclusion>
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-core</artifactId>
@@ -1936,6 +1963,7 @@
         </exclusion>
       </exclusions>
     </dependency>
+    <!-- Hadoop Test Dependencies -->
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
@@ -2035,6 +2063,76 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty-all</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty-all</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <!-- Hadoop Test Dependencies -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-client</artifactId>
       <version>${hadoop.version}</version>
       <exclusions>
@@ -2541,74 +2639,6 @@
             <version>${jersey.version}</version>
           </dependency>
           <!--/GlassFish Jersey dependecies-->
-
-          <!-- Test Dependencies -->
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <version>${hadoop.version}</version>
-            <scope>test</scope>
-            <exclusions>
-              <exclusion>
-                <groupId>commons-logging</groupId>
-                <artifactId>commons-logging</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>servlet-api</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>javax.servlet</groupId>
-                <artifactId>servlet-api</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
-          <!-- Test Dependencies -->
-          <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <version>${hadoop.version}</version>
-            <scope>test</scope>
-            <classifier>tests</classifier>
-            <exclusions>
-              <exclusion>
-                <groupId>commons-logging</groupId>
-                <artifactId>commons-logging</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>servlet-api</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>javax.servlet</groupId>
-                <artifactId>servlet-api</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>log4j</groupId>
-                <artifactId>log4j</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>com.sun.jersey</groupId>
-                <artifactId>jersey-core</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-              </exclusion>
-              <exclusion>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-              </exclusion>
-            </exclusions>
-          </dependency>
         </dependencies>
       </dependencyManagement>
     </profile>
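Note: after bumping hadoop.version to 3.0.3, a quick runtime check of which Hadoop
actually made it onto the classpath (an illustrative snippet, not part of this commit):

    import org.apache.hadoop.util.VersionInfo;

    public class HadoopVersionCheck {
      public static void main(String[] args) {
        System.out.println(VersionInfo.getVersion());      // expected: 3.0.3
        System.out.println(VersionInfo.getBuildVersion()); // full build details
      }
    }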
