[ https://issues.apache.org/jira/browse/SPARK-7814?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14590885#comment-14590885 ]
Herman van Hovell tot Westerflier commented on SPARK-7814: ---------------------------------------------------------- I have built Spark from the latest source using Hadoop 2.3/2.6 (tried them both), using the following command: {noformat} make-distribution.sh -Phadoop-2.6 -Dhadoop.version=2.6.0 -Phive -Phive-thriftserver {noformat} When I execute the following commands: {noformat} val otp = sqlContext.read.parquet("Input/otp.prq") otp.count {noformat} I get the following Janino (Code Generation) error: {noformat} 15/06/17 19:35:51 ERROR TaskSetManager: Task 1 in stage 0.0 failed 1 times; aborting job 15/06/17 19:35:51 ERROR GenerateProjection: failed to compile: import org.apache.spark.sql.catalyst.InternalRow; public SpecificProjection generate(org.apache.spark.sql.catalyst.expressions.Expression[] expr) { return new SpecificProjection(expr); } class SpecificProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseProject { private org.apache.spark.sql.catalyst.expressions.Expression[] expressions = null; public SpecificProjection(org.apache.spark.sql.catalyst.expressions.Expression[] expr) { expressions = expr; } @Override public Object apply(Object r) { return new SpecificRow(expressions, (InternalRow) r); } } final class SpecificRow extends org.apache.spark.sql.BaseMutableRow { private long c0 = -1L; public SpecificRow(org.apache.spark.sql.catalyst.expressions.Expression[] expressions, InternalRow i) { { // column0 nullBits[0] = false; if (!false) { c0 = 0L; } } } public int size() { return 1;} protected boolean[] nullBits = new boolean[1]; public void setNullAt(int i) { nullBits[i] = true; } public boolean isNullAt(int i) { return nullBits[i]; } public Object get(int i) { if (isNullAt(i)) return null; switch (i) { case 0: return c0; } return null; } public void update(int i, Object value) { if (value == null) { setNullAt(i); return; } nullBits[i] = false; switch (i) { case 0: { c0 = (Long)value; return;} } } @Override public long getLong(int i) 
{ if (isNullAt(i)) { return -1L; } switch (i) { case 0: return c0; } throw new IllegalArgumentException("Invalid index: " + i + " in getLong"); } @Override public void setLong(int i, long value) { nullBits[i] = false; switch (i) { case 0: { c0 = value; return; } } throw new IllegalArgumentException("Invalid index: " + i + " in setLong"); } @Override public int hashCode() { int result = 37; result *= 37; result += isNullAt(0) ? 0 : (c0 ^ (c0 >>> 32)); return result; } @Override public boolean equals(Object other) { if (other instanceof SpecificRow) { SpecificRow row = (SpecificRow) other; if (nullBits[0] != row.nullBits[0] || (!nullBits[0] && !(c0 == row.c0))) { return false; } return true; } return super.equals(other); } } org.codehaus.commons.compiler.CompileException: Line 16, Column 33: Object at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897) at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331) at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207) at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188) at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119) at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880) at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159) at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533) at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835) at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063) at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849) at org.codehaus.janino.IClass.getIMethods(IClass.java:211) at org.codehaus.janino.IClass.getIMethods(IClass.java:199) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409) at 
org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662) at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350) at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354) at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393) at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347) at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354) at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322) at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383) at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315) at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233) at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192) at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84) at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77) at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245) at org.apache.spark.sql.catalyst.expressions.codegen.GenerateProjection$.create(GenerateProjection.scala:223) at org.apache.spark.sql.catalyst.expressions.codegen.GenerateProjection$.create(GenerateProjection.scala:35) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272) at 
org.spark-project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599) at org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379) at org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342) at org.spark-project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) at org.spark-project.guava.cache.LocalCache.get(LocalCache.java:4000) at org.spark-project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004) at org.spark-project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282) at org.apache.spark.sql.execution.SparkPlan.newProjection(SparkPlan.scala:160) at org.apache.spark.sql.execution.GeneratedAggregate$$anonfun$8.apply(GeneratedAggregate.scala:249) at org.apache.spark.sql.execution.GeneratedAggregate$$anonfun$8.apply(GeneratedAggregate.scala:246) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:70) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41) at org.apache.spark.scheduler.Task.run(Task.scala:70) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213) at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.ClassNotFoundException: Object at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:69) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at java.lang.Class.forName0(Native Method) at java.lang.Class.forName(Class.java:278) at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78) at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254) at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893) ... 67 more Caused by: java.lang.ClassNotFoundException: Object at java.lang.ClassLoader.findClass(ClassLoader.java:531) at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30) at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:64) ... 
74 more 15/06/17 19:35:51 ERROR Executor: Exception in task 8.0 in stage 0.0 (TID 8) java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 16, Column 33: Object at org.spark-project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306) at org.spark-project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293) at org.spark-project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116) at org.spark-project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135) at org.spark-project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410) at org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380) at org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342) at org.spark-project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) at org.spark-project.guava.cache.LocalCache.get(LocalCache.java:4000) at org.spark-project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004) at org.spark-project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282) at org.apache.spark.sql.execution.SparkPlan.newProjection(SparkPlan.scala:160) at org.apache.spark.sql.execution.GeneratedAggregate$$anonfun$8.apply(GeneratedAggregate.scala:249) at org.apache.spark.sql.execution.GeneratedAggregate$$anonfun$8.apply(GeneratedAggregate.scala:246) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at 
org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:70) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41) at org.apache.spark.scheduler.Task.run(Task.scala:70) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:745) Caused by: org.codehaus.commons.compiler.CompileException: Line 16, Column 33: Object at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897) at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331) at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207) at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188) at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119) at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880) at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159) at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533) at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835) at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063) at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849) at org.codehaus.janino.IClass.getIMethods(IClass.java:211) at org.codehaus.janino.IClass.getIMethods(IClass.java:199) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409) at 
org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662) at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350) at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354) at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393) at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347) at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354) at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322) at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383) at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315) at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233) at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192) at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84) at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77) at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245) at org.apache.spark.sql.catalyst.expressions.codegen.GenerateProjection$.create(GenerateProjection.scala:223) at org.apache.spark.sql.catalyst.expressions.codegen.GenerateProjection$.create(GenerateProjection.scala:35) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272) at 
org.spark-project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599) at org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379) ... 25 more Caused by: java.lang.ClassNotFoundException: Object at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:69) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at java.lang.Class.forName0(Native Method) at java.lang.Class.forName(Class.java:278) at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78) at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254) at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893) ... 67 more Caused by: java.lang.ClassNotFoundException: Object at java.lang.ClassLoader.findClass(ClassLoader.java:531) at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30) at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:64) ... 
74 more org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 0.0 failed 1 times, most recent failure: Lost task 1.0 in stage 0.0 (TID 1, localhost): java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 16, Column 33: Object at org.spark-project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306) at org.spark-project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293) at org.spark-project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116) at org.spark-project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135) at org.spark-project.guava.cache.LocalCache$LoadingValueReference.waitForValue(LocalCache.java:3620) at org.spark-project.guava.cache.LocalCache$Segment.waitForLoadingValue(LocalCache.java:2362) at org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2349) at org.spark-project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) at org.spark-project.guava.cache.LocalCache.get(LocalCache.java:4000) at org.spark-project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004) at org.spark-project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282) at org.apache.spark.sql.execution.SparkPlan.newProjection(SparkPlan.scala:160) at org.apache.spark.sql.execution.GeneratedAggregate$$anonfun$8.apply(GeneratedAggregate.scala:249) at org.apache.spark.sql.execution.GeneratedAggregate$$anonfun$8.apply(GeneratedAggregate.scala:246) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686) at 
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:70) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41) at org.apache.spark.scheduler.Task.run(Task.scala:70) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:745) Caused by: org.codehaus.commons.compiler.CompileException: Line 16, Column 33: Object at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897) at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331) at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207) at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188) at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119) at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880) at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159) at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533) at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835) at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063) at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849) at org.codehaus.janino.IClass.getIMethods(IClass.java:211) at 
org.codehaus.janino.IClass.getIMethods(IClass.java:199) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662) at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350) at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354) at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393) at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185) at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347) at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354) at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322) at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383) at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315) at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233) at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192) at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84) at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77) at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245) at org.apache.spark.sql.catalyst.expressions.codegen.GenerateProjection$.create(GenerateProjection.scala:223) at 
org.apache.spark.sql.catalyst.expressions.codegen.GenerateProjection$.create(GenerateProjection.scala:35) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272) at org.spark-project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599) at org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379) at org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342) ... 24 more Caused by: java.lang.ClassNotFoundException: Object at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:69) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at java.lang.Class.forName0(Native Method) at java.lang.Class.forName(Class.java:278) at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78) at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254) at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893) ... 67 more Caused by: java.lang.ClassNotFoundException: Object at java.lang.ClassLoader.findClass(ClassLoader.java:531) at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30) at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:64) ... 
74 more Driver stacktrace: at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1285) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1276) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1275) at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1275) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749) at scala.Option.foreach(Option.scala:236) at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:749) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1484) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1445) at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48) {noformat} I am on Java 1.7.0_80. Any thoughts? > Turn code generation on by default > ---------------------------------- > > Key: SPARK-7814 > URL: https://issues.apache.org/jira/browse/SPARK-7814 > Project: Spark > Issue Type: Improvement > Components: SQL > Reporter: Reynold Xin > Assignee: Davies Liu > Fix For: 1.5.0 > > -- This message was sent by Atlassian JIRA (v6.3.4#6332) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org For additional commands, e-mail: issues-h...@spark.apache.org