Repository: spark
Updated Branches:
  refs/heads/master cc38abc27 -> 3ed91c9b8


[SPARK-25946][BUILD] Upgrade ASM to 7.x to support JDK11

## What changes were proposed in this pull request?

Upgrade the shaded ASM dependency from 6.x to 7.x (xbean-asm6-shaded 4.8 -> xbean-asm7-shaded 4.12) so that Spark's bytecode analysis in `ClosureCleaner`, `BytecodeUtils`, and `ExecutorClassLoader` can read class files produced by JDK 11.
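
As context for the mechanical changes in the diff: every ASM visitor in Spark passes an explicit API-level constant to its constructor, which is why each `ClassVisitor(ASM6)` / `MethodVisitor(ASM6)` call site is bumped to `ASM7`. A minimal sketch of that pattern against the shaded xbean package (the visitor class and its body are illustrative only, not code from this patch):

```scala
import org.apache.xbean.asm7.{ClassReader, ClassVisitor, MethodVisitor}
import org.apache.xbean.asm7.Opcodes._

// Hypothetical visitor that lists every method it encounters. The ASM7
// constant tells the shaded ASM runtime which API level the visitor
// implements, so it can accept class files emitted by newer JDKs that an
// ASM 6.x reader cannot fully handle.
class MethodLister extends ClassVisitor(ASM7) {
  override def visitMethod(access: Int, name: String, desc: String,
      sig: String, exceptions: Array[String]): MethodVisitor = {
    println(s"$name$desc")
    new MethodVisitor(ASM7) {}
  }
}

// Example use: walk the bytecode of any class on the classpath.
// new ClassReader("java.lang.String").accept(new MethodLister, 0)
```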

## How was this patch tested?

Existing tests.

Closes #22953 from dbtsai/asm7.

Authored-by: DB Tsai <d_t...@apple.com>
Signed-off-by: DB Tsai <d_t...@apple.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3ed91c9b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3ed91c9b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3ed91c9b

Branch: refs/heads/master
Commit: 3ed91c9b8998f2512716f906cd1cba25578111ff
Parents: cc38abc
Author: DB Tsai <d_t...@apple.com>
Authored: Tue Nov 6 05:38:59 2018 +0000
Committer: DB Tsai <d_t...@apple.com>
Committed: Tue Nov 6 05:38:59 2018 +0000

----------------------------------------------------------------------
 core/pom.xml                                      |  2 +-
 .../org/apache/spark/util/ClosureCleaner.scala    | 18 +++++++++---------
 dev/deps/spark-deps-hadoop-2.7                    |  2 +-
 dev/deps/spark-deps-hadoop-3.1                    |  2 +-
 graphx/pom.xml                                    |  2 +-
 .../apache/spark/graphx/util/BytecodeUtils.scala  |  8 ++++----
 pom.xml                                           |  8 ++++----
 repl/pom.xml                                      |  2 +-
 .../apache/spark/repl/ExecutorClassLoader.scala   |  6 +++---
 sql/core/pom.xml                                  |  2 +-
 10 files changed, 26 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index f23d09f..5c26f9a 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -56,7 +56,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.xbean</groupId>
-      <artifactId>xbean-asm6-shaded</artifactId>
+      <artifactId>xbean-asm7-shaded</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
index 6c4740c..1b3e525 100644
--- a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
@@ -23,8 +23,8 @@ import java.lang.invoke.SerializedLambda
 import scala.collection.mutable.{Map, Set, Stack}
 import scala.language.existentials
 
-import org.apache.xbean.asm6.{ClassReader, ClassVisitor, MethodVisitor, Type}
-import org.apache.xbean.asm6.Opcodes._
+import org.apache.xbean.asm7.{ClassReader, ClassVisitor, MethodVisitor, Type}
+import org.apache.xbean.asm7.Opcodes._
 
 import org.apache.spark.{SparkEnv, SparkException}
 import org.apache.spark.internal.Logging
@@ -424,7 +424,7 @@ private[spark] class ReturnStatementInClosureException
   extends SparkException("Return statements aren't allowed in Spark closures")
 
 private class ReturnStatementFinder(targetMethodName: Option[String] = None)
-  extends ClassVisitor(ASM6) {
+  extends ClassVisitor(ASM7) {
   override def visitMethod(access: Int, name: String, desc: String,
       sig: String, exceptions: Array[String]): MethodVisitor = {
 
@@ -438,7 +438,7 @@ private class ReturnStatementFinder(targetMethodName: Option[String] = None)
       val isTargetMethod = targetMethodName.isEmpty ||
        name == targetMethodName.get || name == targetMethodName.get.stripSuffix("$adapted")
 
-      new MethodVisitor(ASM6) {
+      new MethodVisitor(ASM7) {
         override def visitTypeInsn(op: Int, tp: String) {
          if (op == NEW && tp.contains("scala/runtime/NonLocalReturnControl") && isTargetMethod) {
             throw new ReturnStatementInClosureException
@@ -446,7 +446,7 @@ private class ReturnStatementFinder(targetMethodName: Option[String] = None)
         }
       }
     } else {
-      new MethodVisitor(ASM6) {}
+      new MethodVisitor(ASM7) {}
     }
   }
 }
@@ -470,7 +470,7 @@ private[util] class FieldAccessFinder(
     findTransitively: Boolean,
     specificMethod: Option[MethodIdentifier[_]] = None,
     visitedMethods: Set[MethodIdentifier[_]] = Set.empty)
-  extends ClassVisitor(ASM6) {
+  extends ClassVisitor(ASM7) {
 
   override def visitMethod(
       access: Int,
@@ -485,7 +485,7 @@ private[util] class FieldAccessFinder(
       return null
     }
 
-    new MethodVisitor(ASM6) {
+    new MethodVisitor(ASM7) {
      override def visitFieldInsn(op: Int, owner: String, name: String, desc: String) {
         if (op == GETFIELD) {
           for (cl <- fields.keys if cl.getName == owner.replace('/', '.')) {
@@ -525,7 +525,7 @@ private[util] class FieldAccessFinder(
   }
 }
 
-private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM6) {
+private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM7) {
   var myName: String = null
 
   // TODO: Recursively find inner closures that we indirectly reference, e.g.
@@ -540,7 +540,7 @@ private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM
 
   override def visitMethod(access: Int, name: String, desc: String,
       sig: String, exceptions: Array[String]): MethodVisitor = {
-    new MethodVisitor(ASM6) {
+    new MethodVisitor(ASM7) {
       override def visitMethodInsn(
           op: Int, owner: String, name: String, desc: String, itf: Boolean) {
         val argTypes = Type.getArgumentTypes(desc)

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/dev/deps/spark-deps-hadoop-2.7
----------------------------------------------------------------------
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index db84b85..15a5709 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -190,7 +190,7 @@ stringtemplate-3.2.1.jar
 super-csv-2.2.0.jar
 univocity-parsers-2.7.3.jar
 validation-api-1.1.0.Final.jar
-xbean-asm6-shaded-4.8.jar
+xbean-asm7-shaded-4.12.jar
 xercesImpl-2.9.1.jar
 xmlenc-0.52.jar
 xz-1.5.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/dev/deps/spark-deps-hadoop-3.1
----------------------------------------------------------------------
diff --git a/dev/deps/spark-deps-hadoop-3.1 b/dev/deps/spark-deps-hadoop-3.1
index befb93d..6d9191a 100644
--- a/dev/deps/spark-deps-hadoop-3.1
+++ b/dev/deps/spark-deps-hadoop-3.1
@@ -211,7 +211,7 @@ token-provider-1.0.1.jar
 univocity-parsers-2.7.3.jar
 validation-api-1.1.0.Final.jar
 woodstox-core-5.0.3.jar
-xbean-asm6-shaded-4.8.jar
+xbean-asm7-shaded-4.12.jar
 xz-1.5.jar
 zjsonpatch-0.3.0.jar
 zookeeper-3.4.9.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/graphx/pom.xml
----------------------------------------------------------------------
diff --git a/graphx/pom.xml b/graphx/pom.xml
index d65a8ce..22bc148 100644
--- a/graphx/pom.xml
+++ b/graphx/pom.xml
@@ -53,7 +53,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.xbean</groupId>
-      <artifactId>xbean-asm6-shaded</artifactId>
+      <artifactId>xbean-asm7-shaded</artifactId>
     </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/graphx/src/main/scala/org/apache/spark/graphx/util/BytecodeUtils.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/util/BytecodeUtils.scala b/graphx/src/main/scala/org/apache/spark/graphx/util/BytecodeUtils.scala
index 50b03f7..4ea09ec 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/util/BytecodeUtils.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/util/BytecodeUtils.scala
@@ -22,8 +22,8 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 import scala.collection.mutable.HashSet
 import scala.language.existentials
 
-import org.apache.xbean.asm6.{ClassReader, ClassVisitor, MethodVisitor}
-import org.apache.xbean.asm6.Opcodes._
+import org.apache.xbean.asm7.{ClassReader, ClassVisitor, MethodVisitor}
+import org.apache.xbean.asm7.Opcodes._
 
 import org.apache.spark.util.Utils
 
@@ -109,14 +109,14 @@ private[graphx] object BytecodeUtils {
    * determine the actual method invoked by inspecting the bytecode.
    */
   private class MethodInvocationFinder(className: String, methodName: String)
-    extends ClassVisitor(ASM6) {
+    extends ClassVisitor(ASM7) {
 
     val methodsInvoked = new HashSet[(Class[_], String)]
 
     override def visitMethod(access: Int, name: String, desc: String,
                             sig: String, exceptions: Array[String]): MethodVisitor = {
       if (name == methodName) {
-        new MethodVisitor(ASM6) {
+        new MethodVisitor(ASM7) {
           override def visitMethodInsn(
              op: Int, owner: String, name: String, desc: String, itf: Boolean) {
            if (op == INVOKEVIRTUAL || op == INVOKESPECIAL || op == INVOKESTATIC) {

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 597fb2f..a08b7fd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -311,13 +311,13 @@
         <artifactId>chill-java</artifactId>
         <version>${chill.version}</version>
       </dependency>
-      <!-- This artifact is a shaded version of ASM 6.x. The POM that was used to produce this
-           is at https://github.com/apache/geronimo-xbean/tree/trunk/xbean-asm6-shaded
+      <!-- This artifact is a shaded version of ASM 7.x. The POM that was used to produce this
+           is at https://github.com/apache/geronimo-xbean/tree/trunk/xbean-asm7-shaded
            For context on why we shade ASM, see SPARK-782 and SPARK-6152. -->
       <dependency>
         <groupId>org.apache.xbean</groupId>
-        <artifactId>xbean-asm6-shaded</artifactId>
-        <version>4.8</version>
+        <artifactId>xbean-asm7-shaded</artifactId>
+        <version>4.12</version>
       </dependency>
 
       <!-- Shaded deps marked as provided. These are promoted to compile scope

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/repl/pom.xml
----------------------------------------------------------------------
diff --git a/repl/pom.xml b/repl/pom.xml
index d2a89b2..fa015b6 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -102,7 +102,7 @@
 
     <dependency>
       <groupId>org.apache.xbean</groupId>
-      <artifactId>xbean-asm6-shaded</artifactId>
+      <artifactId>xbean-asm7-shaded</artifactId>
     </dependency>
 
    <!-- Explicit listing of transitive deps that are shaded. Otherwise, odd compiler crashes. -->

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
index 88eb0ad..3176502 100644
--- a/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -22,8 +22,8 @@ import java.net.{URI, URL, URLEncoder}
 import java.nio.channels.Channels
 
 import org.apache.hadoop.fs.{FileSystem, Path}
-import org.apache.xbean.asm6._
-import org.apache.xbean.asm6.Opcodes._
+import org.apache.xbean.asm7._
+import org.apache.xbean.asm7.Opcodes._
 
 import org.apache.spark.{SparkConf, SparkEnv}
 import org.apache.spark.deploy.SparkHadoopUtil
@@ -187,7 +187,7 @@ class ExecutorClassLoader(
 }
 
 class ConstructorCleaner(className: String, cv: ClassVisitor)
-extends ClassVisitor(ASM6, cv) {
+extends ClassVisitor(ASM7, cv) {
   override def visitMethod(access: Int, name: String, desc: String,
       sig: String, exceptions: Array[String]): MethodVisitor = {
     val mv = cv.visitMethod(access, name, desc, sig, exceptions)

http://git-wip-us.apache.org/repos/asf/spark/blob/3ed91c9b/sql/core/pom.xml
----------------------------------------------------------------------
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index 2f72ff6..95e98c5 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -118,7 +118,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.xbean</groupId>
-      <artifactId>xbean-asm6-shaded</artifactId>
+      <artifactId>xbean-asm7-shaded</artifactId>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>

