Repository: zeppelin
Updated Branches:
  refs/heads/master 11bdd7110 -> c4319b775


ZEPPELIN-1326: make profile to select dependency of hadoop-common for JDBC interpreter

### What is this PR for?
After #1205, using the JDBC interpreter fails with a java.lang.ClassNotFoundException for org.apache.hadoop.security.UserGroupInformation$AuthenticationMethod:

```
java.lang.ClassNotFoundException: org.apache.hadoop.security.UserGroupInformation$AuthenticationMethod
        at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.apache.zeppelin.jdbc.security.JDBCSecurityImpl.getAuthtype(JDBCSecurityImpl.java:71)
        at org.apache.zeppelin.jdbc.JDBCInterpreter.getConnection(JDBCInterpreter.java:217)
        at org.apache.zeppelin.jdbc.JDBCInterpreter.getStatement(JDBCInterpreter.java:275)
        at org.apache.zeppelin.jdbc.JDBCInterpreter.executeSql(JDBCInterpreter.java:346)
        at org.apache.zeppelin.jdbc.JDBCInterpreter.interpret(JDBCInterpreter.java:452)
        at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:94)
        at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:383)
        at org.apache.zeppelin.scheduler.Job.run(Job.java:176)
        at org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:139)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
```

The workaround is to add "org.apache.hadoop:hadoop-common:2.7.2" in the Dependencies section of the interpreter setting in the GUI, but the JDBC interpreter is supposed to work without asking the user for the hadoop-common dependency.
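
This PR instead guards the Kerberos code path behind the `zeppelin.jdbc.auth.type` setting and moves the `hadoop-*` Maven profiles up to the root pom, so the hadoop-common version can be chosen at build time (e.g. `mvn clean package -Phadoop-2.7`; see the pom.xml diff below).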

### What type of PR is it?
[Bug Fix]

### What is the Jira issue?
* [ZEPPELIN-1326](https://issues.apache.org/jira/browse/ZEPPELIN-1326)

### How should this be tested?
Run the JDBC interpreter; the stack trace above should no longer appear.
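
(It may also be worth checking both cases: with `zeppelin.jdbc.auth.type` unset, a plain connection should open even when hadoop-common is not on the runtime classpath; with it set to KERBEROS, the secure path should still work.)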

### Screenshots (if appropriate)

### Questions:
* Do the license files need updating? n/a
* Are there breaking changes for older versions? n/a
* Does this need documentation? n/a

Author: Prabhjyot Singh <[email protected]>

Closes #1353 from prabhjyotsingh/ZEPPELIN-1326 and squashes the following commits:

033da7b [Prabhjyot Singh] check if auth type is present in key
f64dbf0 [Prabhjyot Singh] make <scope>provided</scope>
0e0016a [Prabhjyot Singh] move profile hadoop-* to root level pom
f061da0 [Prabhjyot Singh] ZEPPELIN-1326: make profile to select dependency for hadoop-common


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/c4319b77
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/c4319b77
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/c4319b77

Branch: refs/heads/master
Commit: c4319b7751c7b6071bb71dab016970b44496fa99
Parents: 11bdd71
Author: Prabhjyot Singh <[email protected]>
Authored: Wed Aug 24 10:44:25 2016 +0530
Committer: Prabhjyot Singh <[email protected]>
Committed: Mon Aug 29 12:24:39 2016 +0530

----------------------------------------------------------------------
 jdbc/pom.xml                                    |  2 +-
 .../apache/zeppelin/jdbc/JDBCInterpreter.java   | 83 ++++++++++----------
 .../jdbc/security/JDBCSecurityImpl.java         |  1 -
 pom.xml                                         | 74 +++++++++++++++++
 spark-dependencies/pom.xml                      | 74 -----------------
 spark/pom.xml                                   | 74 -----------------
 6 files changed, 118 insertions(+), 190 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/c4319b77/jdbc/pom.xml
----------------------------------------------------------------------
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index f4e97c9..8fce336 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -74,7 +74,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>2.7.2</version>
+      <version>${hadoop.version}</version>
       <scope>provided</scope>
     </dependency>
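
(Note: with `provided` scope, hadoop-common is available at compile time but is not bundled with the JDBC interpreter, and `${hadoop.version}` now resolves to whichever `hadoop-*` profile from the root pom is active at build time.)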
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/c4319b77/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
index 0eb0dff..cb99efd 100644
--- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
+++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
@@ -26,7 +26,7 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.*;
 
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.zeppelin.interpreter.Interpreter;
 import org.apache.zeppelin.interpreter.InterpreterContext;
@@ -167,7 +167,7 @@ public class JDBCInterpreter extends Interpreter {
 
     logger.debug("propertiesMap: {}", propertiesMap);
 
-    if (!StringUtils.isAnyEmpty(property.getProperty("zeppelin.jdbc.auth.type"))) {
+    if (!StringUtils.isEmpty(property.getProperty("zeppelin.jdbc.auth.type"))) {
       JDBCSecurityImpl.createSecureConfiguration(property);
     }
     for (String propertyKey : propertiesMap.keySet()) {
@@ -214,49 +214,52 @@ public class JDBCInterpreter extends Interpreter {
       Class.forName(properties.getProperty(DRIVER_KEY));
       final String url = properties.getProperty(URL_KEY);
 
-      UserGroupInformation.AuthenticationMethod authType = JDBCSecurityImpl.getAuthtype(property);
-      switch (authType) {
-          case KERBEROS:
-            if (user == null) {
-              connection = DriverManager.getConnection(url, properties);
-            } else {
-              if ("hive".equalsIgnoreCase(propertyKey)) {
-                connection = DriverManager.getConnection(url + ";hive.server2.proxy.user=" + user,
-                    properties);
+      if (StringUtils.isEmpty(property.getProperty("zeppelin.jdbc.auth.type"))) {
+        connection = DriverManager.getConnection(url, properties);
+      } else {
+        UserGroupInformation.AuthenticationMethod authType = JDBCSecurityImpl.getAuthtype(property);
+        switch (authType) {
+            case KERBEROS:
+              if (user == null) {
+                connection = DriverManager.getConnection(url, properties);
               } else {
-                UserGroupInformation ugi = null;
-                try {
-                  ugi = UserGroupInformation.createProxyUser(user,
-                      UserGroupInformation.getCurrentUser());
-                } catch (Exception e) {
-                  logger.error("Error in createProxyUser", e);
-                  StringBuilder stringBuilder = new StringBuilder();
-                  stringBuilder.append(e.getMessage()).append("\n");
-                  stringBuilder.append(e.getCause());
-                  throw new InterpreterException(stringBuilder.toString());
-                }
-                try {
-                  connection = ugi.doAs(new PrivilegedExceptionAction<Connection>() {
-                    @Override
-                    public Connection run() throws Exception {
-                      return DriverManager.getConnection(url, properties);
-                    }
-                  });
-                } catch (Exception e) {
-                  logger.error("Error in doAs", e);
-                  StringBuilder stringBuilder = new StringBuilder();
-                  stringBuilder.append(e.getMessage()).append("\n");
-                  stringBuilder.append(e.getCause());
-                  throw new InterpreterException(stringBuilder.toString());
+                if ("hive".equalsIgnoreCase(propertyKey)) {
+                  connection = DriverManager.getConnection(url + ";hive.server2.proxy.user=" + user,
+                      properties);
+                } else {
+                  UserGroupInformation ugi = null;
+                  try {
+                    ugi = UserGroupInformation.createProxyUser(user,
+                        UserGroupInformation.getCurrentUser());
+                  } catch (Exception e) {
+                    logger.error("Error in createProxyUser", e);
+                    StringBuilder stringBuilder = new StringBuilder();
+                    stringBuilder.append(e.getMessage()).append("\n");
+                    stringBuilder.append(e.getCause());
+                    throw new InterpreterException(stringBuilder.toString());
+                  }
+                  try {
+                    connection = ugi.doAs(new PrivilegedExceptionAction<Connection>() {
+                      @Override
+                      public Connection run() throws Exception {
+                        return DriverManager.getConnection(url, properties);
+                      }
+                    });
+                  } catch (Exception e) {
+                    logger.error("Error in doAs", e);
+                    StringBuilder stringBuilder = new StringBuilder();
+                    stringBuilder.append(e.getMessage()).append("\n");
+                    stringBuilder.append(e.getCause());
+                    throw new InterpreterException(stringBuilder.toString());
+                  }
                 }
               }
-            }
-            break;
+              break;
 
-          default:
-            connection = DriverManager.getConnection(url, properties);
+            default:
+              connection = DriverManager.getConnection(url, properties);
+        }
       }
-
     }
     propertyKeySqlCompleterMap.put(propertyKey, createSqlCompleter(connection));
     return connection;
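
To make the reshuffled control flow above concrete, here is a minimal standalone sketch (not the committed code; the class and helper names are illustrative, and it assumes commons-lang on the classpath): Hadoop security classes are only referenced once `zeppelin.jdbc.auth.type` is known to be set, so a plain JDBC connection can no longer trigger the ClassNotFoundException.

```
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

import org.apache.commons.lang.StringUtils;

public class ConnectionGuardSketch {
  /** Opens a connection, touching Hadoop classes only when auth is configured. */
  public static Connection connect(String url, Properties properties) throws Exception {
    if (StringUtils.isEmpty(properties.getProperty("zeppelin.jdbc.auth.type"))) {
      // No auth type set: plain JDBC, no Hadoop classes are loaded, so a
      // missing hadoop-common jar cannot cause a ClassNotFoundException.
      return DriverManager.getConnection(url, properties);
    }
    // Auth type present: only now is it safe to reference
    // org.apache.hadoop.security.UserGroupInformation (the KERBEROS
    // branch is elided here; see the diff above).
    return connectWithHadoopSecurity(url, properties);
  }

  private static Connection connectWithHadoopSecurity(String url, Properties properties)
      throws Exception {
    // Hypothetical placeholder for the UserGroupInformation/doAs logic.
    throw new UnsupportedOperationException("Kerberos path omitted in this sketch");
  }
}
```

In the real interpreter, the KERBEROS branch additionally proxies the end user via UserGroupInformation.createProxyUser and doAs, as shown in the diff.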

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/c4319b77/jdbc/src/main/java/org/apache/zeppelin/jdbc/security/JDBCSecurityImpl.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/security/JDBCSecurityImpl.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/security/JDBCSecurityImpl.java
index 8cc2735..32a7990 100644
--- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/security/JDBCSecurityImpl.java
+++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/security/JDBCSecurityImpl.java
@@ -18,7 +18,6 @@ package org.apache.zeppelin.jdbc.security;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.zeppelin.jdbc.SqlCompleter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/c4319b77/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index cd08740..76d319f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -657,6 +657,80 @@
 
   <profiles>
     <profile>
+      <id>hadoop-0.23</id>
+      <!-- SPARK-1121: Adds an explicit dependency on Avro to work around a
+        Hadoop 0.23.X issue -->
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro</artifactId>
+        </dependency>
+      </dependencies>
+      <properties>
+        <hadoop.version>0.23.10</hadoop.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-1</id>
+      <properties>
+        <hadoop.version>1.0.4</hadoop.version>
+        <avro.mapred.classifier>hadoop1</avro.mapred.classifier>
+        <codehaus.jackson.version>1.8.8</codehaus.jackson.version>
+        <akka.group>org.spark-project.akka</akka.group>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.2</id>
+      <properties>
+        <hadoop.version>2.2.0</hadoop.version>
+        <protobuf.version>2.5.0</protobuf.version>
+        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.3</id>
+      <properties>
+        <hadoop.version>2.3.0</hadoop.version>
+        <protobuf.version>2.5.0</protobuf.version>
+        <jets3t.version>0.9.3</jets3t.version>
+        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.4</id>
+      <properties>
+        <hadoop.version>2.4.0</hadoop.version>
+        <protobuf.version>2.5.0</protobuf.version>
+        <jets3t.version>0.9.3</jets3t.version>
+        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.6</id>
+      <properties>
+        <hadoop.version>2.6.0</hadoop.version>
+        <protobuf.version>2.5.0</protobuf.version>
+        <jets3t.version>0.9.3</jets3t.version>
+        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.7</id>
+      <properties>
+        <hadoop.version>2.7.2</hadoop.version>
+        <protobuf.version>2.5.0</protobuf.version>
+        <jets3t.version>0.9.0</jets3t.version>
+        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
+      </properties>
+    </profile>
+
+    <profile>
       <id>scala-2.10</id>
       <activation>
         <activeByDefault>true</activeByDefault>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/c4319b77/spark-dependencies/pom.xml
----------------------------------------------------------------------
diff --git a/spark-dependencies/pom.xml b/spark-dependencies/pom.xml
index 7da976a..ed2d322 100644
--- a/spark-dependencies/pom.xml
+++ b/spark-dependencies/pom.xml
@@ -533,80 +533,6 @@
     </profile>
 
     <profile>
-      <id>hadoop-0.23</id>
-      <!-- SPARK-1121: Adds an explicit dependency on Avro to work around a
-        Hadoop 0.23.X issue -->
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
-        </dependency>
-      </dependencies>
-      <properties>
-        <hadoop.version>0.23.10</hadoop.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-1</id>
-      <properties>
-        <hadoop.version>1.0.4</hadoop.version>
-        <avro.mapred.classifier>hadoop1</avro.mapred.classifier>
-        <codehaus.jackson.version>1.8.8</codehaus.jackson.version>
-        <akka.group>org.spark-project.akka</akka.group>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.2</id>
-      <properties>
-        <hadoop.version>2.2.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.3</id>
-      <properties>
-        <hadoop.version>2.3.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.4</id>
-      <properties>
-        <hadoop.version>2.4.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.6</id>
-      <properties>
-        <hadoop.version>2.6.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.7</id>
-      <properties>
-        <hadoop.version>2.7.2</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.0</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
       <id>mapr3</id>
       <activation>
         <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/c4319b77/spark/pom.xml
----------------------------------------------------------------------
diff --git a/spark/pom.xml b/spark/pom.xml
index 5dcbb51..d2bed9f 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -659,80 +659,6 @@
     </profile>
 
     <profile>
-      <id>hadoop-0.23</id>
-      <!-- SPARK-1121: Adds an explicit dependency on Avro to work around a
-        Hadoop 0.23.X issue -->
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
-        </dependency>
-      </dependencies>
-      <properties>
-        <hadoop.version>0.23.10</hadoop.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-1</id>
-      <properties>
-        <hadoop.version>1.0.4</hadoop.version>
-        <avro.mapred.classifier>hadoop1</avro.mapred.classifier>
-        <codehaus.jackson.version>1.8.8</codehaus.jackson.version>
-        <akka.group>org.spark-project.akka</akka.group>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.2</id>
-      <properties>
-        <hadoop.version>2.2.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.3</id>
-      <properties>
-        <hadoop.version>2.3.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.4</id>
-      <properties>
-        <hadoop.version>2.4.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.6</id>
-      <properties>
-        <hadoop.version>2.6.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.7</id>
-      <properties>
-        <hadoop.version>2.7.2</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.0</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-
-    <profile>
       <id>mapr3</id>
       <activation>
         <activeByDefault>false</activeByDefault>
