Steven Aerts created SPARK-21211:
------------------------------------
Summary: Codegen CompileException:
ExternalMapToCatalyst_value_isNull0 is not an rvalue
Key: SPARK-21211
URL: https://issues.apache.org/jira/browse/SPARK-21211
Project: Spark
Issue Type: Bug
Components: SQL
Affects Versions: 2.1.1, 2.2.0
Reporter: Steven Aerts
Priority: Critical
Running the following code:
{code}
case class Person(name: String)
case class Register(register: Map[String, Person])
spark.createDataset(Seq(Register(Map("steven" -> Person("steven")))))
{code}
fails with the following exception:
{code}
17/06/26 14:45:04 ERROR CodeGenerator: failed to compile:
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 61,
Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */ return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends
org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */ private Object[] references;
/* 008 */ private java.lang.String argValue;
/* 009 */ private Object[] values;
/* 010 */ private boolean resultIsNull;
/* 011 */ private java.lang.String argValue1;
/* 012 */ private boolean isNull13;
/* 013 */ private boolean value13;
/* 014 */ private boolean isNull14;
/* 015 */ private InternalRow value14;
/* 016 */ private boolean isNull15;
/* 017 */ private InternalRow value15;
/* 018 */ private UnsafeRow result;
/* 019 */ private
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
/* 020 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
/* 021 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
/* 022 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter
arrayWriter1;
/* 023 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
/* 024 */
/* 025 */ public SpecificUnsafeProjection(Object[] references) {
/* 026 */ this.references = references;
/* 027 */
/* 028 */ values = null;
/* 029 */
/* 030 */
/* 031 */ isNull13 = false;
/* 032 */ value13 = false;
/* 033 */ isNull14 = false;
/* 034 */ value14 = null;
/* 035 */ isNull15 = false;
/* 036 */ value15 = null;
/* 037 */ result = new UnsafeRow(1);
/* 038 */ holder = new
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
/* 039 */ rowWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 040 */ arrayWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 041 */ arrayWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 042 */ rowWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 043 */
/* 044 */ }
/* 045 */
/* 046 */ public void initialize(int partitionIndex) {
/* 047 */
/* 048 */ }
/* 049 */
/* 050 */
/* 051 */ private void evalIfTrueExpr(InternalRow i) {
/* 052 */ final InternalRow value8 = null;
/* 053 */ isNull14 = true;
/* 054 */ value14 = value8;
/* 055 */ }
/* 056 */
/* 057 */
/* 058 */ private void evalIfCondExpr(InternalRow i) {
/* 059 */
/* 060 */ isNull13 = false;
/* 061 */ value13 = ExternalMapToCatalyst_value_isNull0;
/* 062 */ }
/* 063 */
/* 064 */
/* 065 */ private void evalIfFalseExpr(InternalRow i) {
/* 066 */ values = new Object[1];
/* 067 */ resultIsNull = false;
/* 068 */ if (!resultIsNull) {
/* 069 */
/* 070 */ if (ExternalMapToCatalyst_value_isNull0) {
/* 071 */ throw new NullPointerException(((java.lang.String)
references[2]));
/* 072 */ }
/* 073 */ boolean isNull11 = true;
/* 074 */ java.lang.String value11 = null;
/* 075 */ if (!false) {
/* 076 */
/* 077 */ isNull11 = false;
/* 078 */ if (!isNull11) {
/* 079 */
/* 080 */ Object funcResult1 = null;
/* 081 */ funcResult1 = ExternalMapToCatalyst_value0.name();
/* 082 */
/* 083 */ if (funcResult1 != null) {
/* 084 */ value11 = (java.lang.String) funcResult1;
/* 085 */ } else {
/* 086 */ isNull11 = true;
/* 087 */ }
/* 088 */
/* 089 */
/* 090 */ }
/* 091 */ }
/* 092 */ resultIsNull = isNull11;
/* 093 */ argValue1 = value11;
/* 094 */ }
/* 095 */
/* 096 */ boolean isNull10 = resultIsNull;
/* 097 */ final UTF8String value10 = resultIsNull ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue1);
/* 098 */ isNull10 = value10 == null;
/* 099 */ if (isNull10) {
/* 100 */ values[0] = null;
/* 101 */ } else {
/* 102 */ values[0] = value10;
/* 103 */ }
/* 104 */ final InternalRow value9 = new
org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
/* 105 */ values = null;
/* 106 */ isNull15 = false;
/* 107 */ value15 = value9;
/* 108 */ }
/* 109 */
/* 110 */
/* 111 */ // Scala.Function1 need this
/* 112 */ public java.lang.Object apply(java.lang.Object row) {
/* 113 */ return apply((InternalRow) row);
/* 114 */ }
/* 115 */
/* 116 */ public UnsafeRow apply(InternalRow i) {
/* 117 */ holder.reset();
/* 118 */
/* 119 */ rowWriter.zeroOutNullBytes();
/* 120 */
/* 121 */
/* 122 */ boolean isNull4 = i.isNullAt(0);
/* 123 */ $line19.$read$$iw$$iw$Register value4 = isNull4 ? null :
(($line19.$read$$iw$$iw$Register)i.get(0, null));
/* 124 */
/* 125 */ if (isNull4) {
/* 126 */ throw new NullPointerException(((java.lang.String)
references[0]));
/* 127 */ }
/* 128 */
/* 129 */ if (false) {
/* 130 */ throw new NullPointerException(((java.lang.String)
references[1]));
/* 131 */ }
/* 132 */ boolean isNull1 = true;
/* 133 */ scala.collection.immutable.Map value1 = null;
/* 134 */ if (!false) {
/* 135 */
/* 136 */ isNull1 = false;
/* 137 */ if (!isNull1) {
/* 138 */
/* 139 */ Object funcResult = null;
/* 140 */ funcResult = value4.register();
/* 141 */
/* 142 */ if (funcResult != null) {
/* 143 */ value1 = (scala.collection.immutable.Map) funcResult;
/* 144 */ } else {
/* 145 */ isNull1 = true;
/* 146 */ }
/* 147 */
/* 148 */
/* 149 */ }
/* 150 */ }
/* 151 */ MapData value = null;
/* 152 */ if (!isNull1) {
/* 153 */ final int length = value1.size();
/* 154 */ final Object[] convertedKeys = new Object[length];
/* 155 */ final Object[] convertedValues = new Object[length];
/* 156 */ int index = 0;
/* 157 */ final scala.collection.Iterator entries = value1.iterator();
/* 158 */ while(entries.hasNext()) {
/* 159 */
/* 160 */ final scala.Tuple2 entry = (scala.Tuple2) entries.next();
/* 161 */ java.lang.String ExternalMapToCatalyst_key0 =
(java.lang.String) entry._1();
/* 162 */ $line18.$read$$iw$$iw$Person ExternalMapToCatalyst_value0 =
($line18.$read$$iw$$iw$Person) entry._2();
/* 163 */
/* 164 */ boolean ExternalMapToCatalyst_value_isNull0 =
ExternalMapToCatalyst_value0 == null;
/* 165 */
/* 166 */
/* 167 */ argValue = ExternalMapToCatalyst_key0;
/* 168 */
/* 169 */ boolean isNull5 = false;
/* 170 */ final UTF8String value5 = false ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
/* 171 */ isNull5 = value5 == null;
/* 172 */ if (isNull5) {
/* 173 */ throw new RuntimeException("Cannot use null as map key!");
/* 174 */ } else {
/* 175 */ convertedKeys[index] = (UTF8String) value5;
/* 176 */ }
/* 177 */
/* 178 */
/* 179 */ evalIfCondExpr(i);
/* 180 */ boolean isNull6 = false;
/* 181 */ InternalRow value6 = null;
/* 182 */ if (!isNull13 && value13) {
/* 183 */ evalIfTrueExpr(i);
/* 184 */ isNull6 = isNull14;
/* 185 */ value6 = value14;
/* 186 */ } else {
/* 187 */ evalIfFalseExpr(i);
/* 188 */ isNull6 = isNull15;
/* 189 */ value6 = value15;
/* 190 */ }
/* 191 */ if (isNull6) {
/* 192 */ convertedValues[index] = null;
/* 193 */ } else {
/* 194 */ convertedValues[index] = (InternalRow) value6;
/* 195 */ }
/* 196 */
/* 197 */ index++;
/* 198 */ }
/* 199 */
/* 200 */ value = new
org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
/* 201 */ }
/* 202 */ if (isNull1) {
/* 203 */ rowWriter.setNullAt(0);
/* 204 */ } else {
/* 205 */ // Remember the current cursor so that we can calculate how
many bytes are
/* 206 */ // written later.
/* 207 */ final int tmpCursor = holder.cursor;
/* 208 */
/* 209 */ if (value instanceof UnsafeMapData) {
/* 210 */
/* 211 */ final int sizeInBytes = ((UnsafeMapData)
value).getSizeInBytes();
/* 212 */ // grow the global buffer before writing data.
/* 213 */ holder.grow(sizeInBytes);
/* 214 */ ((UnsafeMapData) value).writeToMemory(holder.buffer,
holder.cursor);
/* 215 */ holder.cursor += sizeInBytes;
/* 216 */
/* 217 */ } else {
/* 218 */ final ArrayData keys = value.keyArray();
/* 219 */ final ArrayData values1 = value.valueArray();
/* 220 */
/* 221 */ // preserve 8 bytes to write the key array numBytes later.
/* 222 */ holder.grow(8);
/* 223 */ holder.cursor += 8;
/* 224 */
/* 225 */ // Remember the current cursor so that we can write numBytes
of key array later.
/* 226 */ final int tmpCursor1 = holder.cursor;
/* 227 */
/* 228 */
/* 229 */ if (keys instanceof UnsafeArrayData) {
/* 230 */
/* 231 */ final int sizeInBytes1 = ((UnsafeArrayData)
keys).getSizeInBytes();
/* 232 */ // grow the global buffer before writing data.
/* 233 */ holder.grow(sizeInBytes1);
/* 234 */ ((UnsafeArrayData) keys).writeToMemory(holder.buffer,
holder.cursor);
/* 235 */ holder.cursor += sizeInBytes1;
/* 236 */
/* 237 */ } else {
/* 238 */ final int numElements = keys.numElements();
/* 239 */ arrayWriter.initialize(holder, numElements, 8);
/* 240 */
/* 241 */ for (int index1 = 0; index1 < numElements; index1++) {
/* 242 */ if (keys.isNullAt(index1)) {
/* 243 */ arrayWriter.setNull(index1);
/* 244 */ } else {
/* 245 */ final UTF8String element = keys.getUTF8String(index1);
/* 246 */ arrayWriter.write(index1, element);
/* 247 */ }
/* 248 */ }
/* 249 */ }
/* 250 */
/* 251 */ // Write the numBytes of key array into the first 8 bytes.
/* 252 */ Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor
- tmpCursor1);
/* 253 */
/* 254 */
/* 255 */ if (values1 instanceof UnsafeArrayData) {
/* 256 */
/* 257 */ final int sizeInBytes3 = ((UnsafeArrayData)
values1).getSizeInBytes();
/* 258 */ // grow the global buffer before writing data.
/* 259 */ holder.grow(sizeInBytes3);
/* 260 */ ((UnsafeArrayData) values1).writeToMemory(holder.buffer,
holder.cursor);
/* 261 */ holder.cursor += sizeInBytes3;
/* 262 */
/* 263 */ } else {
/* 264 */ final int numElements1 = values1.numElements();
/* 265 */ arrayWriter1.initialize(holder, numElements1, 8);
/* 266 */
/* 267 */ for (int index2 = 0; index2 < numElements1; index2++) {
/* 268 */ if (values1.isNullAt(index2)) {
/* 269 */ arrayWriter1.setNull(index2);
/* 270 */ } else {
/* 271 */ final InternalRow element1 = values1.getStruct(index2,
1);
/* 272 */
/* 273 */ final int tmpCursor3 = holder.cursor;
/* 274 */
/* 275 */ if (element1 instanceof UnsafeRow) {
/* 276 */
/* 277 */ final int sizeInBytes2 = ((UnsafeRow)
element1).getSizeInBytes();
/* 278 */ // grow the global buffer before writing data.
/* 279 */ holder.grow(sizeInBytes2);
/* 280 */ ((UnsafeRow) element1).writeToMemory(holder.buffer,
holder.cursor);
/* 281 */ holder.cursor += sizeInBytes2;
/* 282 */
/* 283 */ } else {
/* 284 */ rowWriter1.reset();
/* 285 */
/* 286 */
/* 287 */ boolean isNull16 = element1.isNullAt(0);
/* 288 */ UTF8String value16 = isNull16 ? null :
element1.getUTF8String(0);
/* 289 */
/* 290 */ if (isNull16) {
/* 291 */ rowWriter1.setNullAt(0);
/* 292 */ } else {
/* 293 */ rowWriter1.write(0, value16);
/* 294 */ }
/* 295 */ }
/* 296 */
/* 297 */ arrayWriter1.setOffsetAndSize(index2, tmpCursor3,
holder.cursor - tmpCursor3);
/* 298 */
/* 299 */ }
/* 300 */ }
/* 301 */ }
/* 302 */
/* 303 */ }
/* 304 */
/* 305 */ rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor -
tmpCursor);
/* 306 */ }
/* 307 */ result.setTotalSize(holder.totalSize());
/* 308 */ return result;
/* 309 */ }
/* 310 */
/* 311 */
/* 312 */
/* 313 */ }
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 61,
Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
at
org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11004)
at
org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:6639)
at
org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5001)
at org.codehaus.janino.UnitCompiler.access$10500(UnitCompiler.java:206)
at
org.codehaus.janino.UnitCompiler$13.visitAmbiguousName(UnitCompiler.java:4984)
at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:3633)
at org.codehaus.janino.Java$Lvalue.accept(Java.java:3563)
at
org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4956)
at
org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4925)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3189)
at org.codehaus.janino.UnitCompiler.access$5100(UnitCompiler.java:206)
at
org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3143)
at
org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3139)
at org.codehaus.janino.Java$Assignment.accept(Java.java:3847)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3139)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2112)
at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:206)
at
org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1377)
at
org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1370)
at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2558)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
at
org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2811)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1262)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1234)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:538)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:890)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:894)
at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:206)
at
org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:377)
at
org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:369)
at
org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1128)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1209)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:564)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:420)
at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:206)
at
org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:374)
at
org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:369)
at
org.codehaus.janino.Java$AbstractPackageMemberClassDeclaration.accept(Java.java:1309)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:345)
at
org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:396)
at
org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:311)
at
org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:229)
at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:196)
at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:91)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1054)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1121)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1118)
at
org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
at
org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
at
org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
at
org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
at
org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
at
org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1000)
at
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:415)
at
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:366)
at
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:984)
at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:264)
at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:264)
at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:288)
at
org.apache.spark.sql.SparkSession$$anonfun$3.apply(SparkSession.scala:460)
at
org.apache.spark.sql.SparkSession$$anonfun$3.apply(SparkSession.scala:460)
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.immutable.List.map(List.scala:285)
at
org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:460)
at $line20.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:28)
at $line20.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:33)
at $line20.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:35)
at $line20.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:37)
at $line20.$read$$iw$$iw$$iw$$iw.<init>(<console>:39)
at $line20.$read$$iw$$iw$$iw.<init>(<console>:41)
at $line20.$read$$iw$$iw.<init>(<console>:43)
at $line20.$read$$iw.<init>(<console>:45)
at $line20.$read.<init>(<console>:47)
at $line20.$read$.<init>(<console>:51)
at $line20.$read$.<clinit>(<console>)
at $line20.$eval$.$print$lzycompute(<console>:7)
at $line20.$eval$.$print(<console>:6)
at $line20.$eval.$print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
at
scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
at
scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
at
scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
at
scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at
scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
at
scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
at
scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
at
scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
at
scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
at
scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
at
scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
at org.apache.spark.repl.Main$.doMain(Main.scala:70)
at org.apache.spark.repl.Main$.main(Main.scala:53)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:755)
at
org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
java.lang.RuntimeException: Error while encoding:
java.util.concurrent.ExecutionException:
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 61,
Column 46: failed to compile: org.codehaus.commons.compiler.CompileException:
File 'generated.java', Line 61, Column 46: Expression
"ExternalMapToCatalyst_value_isNull0" is not an rvalue
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */ return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends
org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */ private Object[] references;
/* 008 */ private java.lang.String argValue;
/* 009 */ private Object[] values;
/* 010 */ private boolean resultIsNull;
/* 011 */ private java.lang.String argValue1;
/* 012 */ private boolean isNull13;
/* 013 */ private boolean value13;
/* 014 */ private boolean isNull14;
/* 015 */ private InternalRow value14;
/* 016 */ private boolean isNull15;
/* 017 */ private InternalRow value15;
/* 018 */ private UnsafeRow result;
/* 019 */ private
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
/* 020 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
/* 021 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
/* 022 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter
arrayWriter1;
/* 023 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
/* 024 */
/* 025 */ public SpecificUnsafeProjection(Object[] references) {
/* 026 */ this.references = references;
/* 027 */
/* 028 */ values = null;
/* 029 */
/* 030 */
/* 031 */ isNull13 = false;
/* 032 */ value13 = false;
/* 033 */ isNull14 = false;
/* 034 */ value14 = null;
/* 035 */ isNull15 = false;
/* 036 */ value15 = null;
/* 037 */ result = new UnsafeRow(1);
/* 038 */ holder = new
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
/* 039 */ rowWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 040 */ arrayWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 041 */ arrayWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 042 */ rowWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 043 */
/* 044 */ }
/* 045 */
/* 046 */ public void initialize(int partitionIndex) {
/* 047 */
/* 048 */ }
/* 049 */
/* 050 */
/* 051 */ private void evalIfTrueExpr(InternalRow i) {
/* 052 */ final InternalRow value8 = null;
/* 053 */ isNull14 = true;
/* 054 */ value14 = value8;
/* 055 */ }
/* 056 */
/* 057 */
/* 058 */ private void evalIfCondExpr(InternalRow i) {
/* 059 */
/* 060 */ isNull13 = false;
/* 061 */ value13 = ExternalMapToCatalyst_value_isNull0;
/* 062 */ }
/* 063 */
/* 064 */
/* 065 */ private void evalIfFalseExpr(InternalRow i) {
/* 066 */ values = new Object[1];
/* 067 */ resultIsNull = false;
/* 068 */ if (!resultIsNull) {
/* 069 */
/* 070 */ if (ExternalMapToCatalyst_value_isNull0) {
/* 071 */ throw new NullPointerException(((java.lang.String)
references[2]));
/* 072 */ }
/* 073 */ boolean isNull11 = true;
/* 074 */ java.lang.String value11 = null;
/* 075 */ if (!false) {
/* 076 */
/* 077 */ isNull11 = false;
/* 078 */ if (!isNull11) {
/* 079 */
/* 080 */ Object funcResult1 = null;
/* 081 */ funcResult1 = ExternalMapToCatalyst_value0.name();
/* 082 */
/* 083 */ if (funcResult1 != null) {
/* 084 */ value11 = (java.lang.String) funcResult1;
/* 085 */ } else {
/* 086 */ isNull11 = true;
/* 087 */ }
/* 088 */
/* 089 */
/* 090 */ }
/* 091 */ }
/* 092 */ resultIsNull = isNull11;
/* 093 */ argValue1 = value11;
/* 094 */ }
/* 095 */
/* 096 */ boolean isNull10 = resultIsNull;
/* 097 */ final UTF8String value10 = resultIsNull ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue1);
/* 098 */ isNull10 = value10 == null;
/* 099 */ if (isNull10) {
/* 100 */ values[0] = null;
/* 101 */ } else {
/* 102 */ values[0] = value10;
/* 103 */ }
/* 104 */ final InternalRow value9 = new
org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
/* 105 */ values = null;
/* 106 */ isNull15 = false;
/* 107 */ value15 = value9;
/* 108 */ }
/* 109 */
/* 110 */
/* 111 */ // Scala.Function1 need this
/* 112 */ public java.lang.Object apply(java.lang.Object row) {
/* 113 */ return apply((InternalRow) row);
/* 114 */ }
/* 115 */
/* 116 */ public UnsafeRow apply(InternalRow i) {
/* 117 */ holder.reset();
/* 118 */
/* 119 */ rowWriter.zeroOutNullBytes();
/* 120 */
/* 121 */
/* 122 */ boolean isNull4 = i.isNullAt(0);
/* 123 */ Register value4 = isNull4 ? null : ((Register)i.get(0, null));
/* 124 */
/* 125 */ if (isNull4) {
/* 126 */ throw new NullPointerException(((java.lang.String)
references[0]));
/* 127 */ }
/* 128 */
/* 129 */ if (false) {
/* 130 */ throw new NullPointerException(((java.lang.String)
references[1]));
/* 131 */ }
/* 132 */ boolean isNull1 = true;
/* 133 */ scala.collection.immutable.Map value1 = null;
/* 134 */ if (!false) {
/* 135 */
/* 136 */ isNull1 = false;
/* 137 */ if (!isNull1) {
/* 138 */
/* 139 */ Object funcResult = null;
/* 140 */ funcResult = value4.register();
/* 141 */
/* 142 */ if (funcResult != null) {
/* 143 */ value1 = (scala.collection.immutable.Map) funcResult;
/* 144 */ } else {
/* 145 */ isNull1 = true;
/* 146 */ }
/* 147 */
/* 148 */
/* 149 */ }
/* 150 */ }
/* 151 */ MapData value = null;
/* 152 */ if (!isNull1) {
/* 153 */ final int length = value1.size();
/* 154 */ final Object[] convertedKeys = new Object[length];
/* 155 */ final Object[] convertedValues = new Object[length];
/* 156 */ int index = 0;
/* 157 */ final scala.collection.Iterator entries = value1.iterator();
/* 158 */ while(entries.hasNext()) {
/* 159 */
/* 160 */ final scala.Tuple2 entry = (scala.Tuple2) entries.next();
/* 161 */ java.lang.String ExternalMapToCatalyst_key0 =
(java.lang.String) entry._1();
/* 162 */ Person ExternalMapToCatalyst_value0 = (Person) entry._2();
/* 163 */
/* 164 */ boolean ExternalMapToCatalyst_value_isNull0 =
ExternalMapToCatalyst_value0 == null;
/* 165 */
/* 166 */
/* 167 */ argValue = ExternalMapToCatalyst_key0;
/* 168 */
/* 169 */ boolean isNull5 = false;
/* 170 */ final UTF8String value5 = false ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
/* 171 */ isNull5 = value5 == null;
/* 172 */ if (isNull5) {
/* 173 */ throw new RuntimeException("Cannot use null as map key!");
/* 174 */ } else {
/* 175 */ convertedKeys[index] = (UTF8String) value5;
/* 176 */ }
/* 177 */
/* 178 */
/* 179 */ evalIfCondExpr(i);
/* 180 */ boolean isNull6 = false;
/* 181 */ InternalRow value6 = null;
/* 182 */ if (!isNull13 && value13) {
/* 183 */ evalIfTrueExpr(i);
/* 184 */ isNull6 = isNull14;
/* 185 */ value6 = value14;
/* 186 */ } else {
/* 187 */ evalIfFalseExpr(i);
/* 188 */ isNull6 = isNull15;
/* 189 */ value6 = value15;
/* 190 */ }
/* 191 */ if (isNull6) {
/* 192 */ convertedValues[index] = null;
/* 193 */ } else {
/* 194 */ convertedValues[index] = (InternalRow) value6;
/* 195 */ }
/* 196 */
/* 197 */ index++;
/* 198 */ }
/* 199 */
/* 200 */ value = new
org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
/* 201 */ }
/* 202 */ if (isNull1) {
/* 203 */ rowWriter.setNullAt(0);
/* 204 */ } else {
/* 205 */ // Remember the current cursor so that we can calculate how
many bytes are
/* 206 */ // written later.
/* 207 */ final int tmpCursor = holder.cursor;
/* 208 */
/* 209 */ if (value instanceof UnsafeMapData) {
/* 210 */
/* 211 */ final int sizeInBytes = ((UnsafeMapData)
value).getSizeInBytes();
/* 212 */ // grow the global buffer before writing data.
/* 213 */ holder.grow(sizeInBytes);
/* 214 */ ((UnsafeMapData) value).writeToMemory(holder.buffer,
holder.cursor);
/* 215 */ holder.cursor += sizeInBytes;
/* 216 */
/* 217 */ } else {
/* 218 */ final ArrayData keys = value.keyArray();
/* 219 */ final ArrayData values1 = value.valueArray();
/* 220 */
/* 221 */ // preserve 8 bytes to write the key array numBytes later.
/* 222 */ holder.grow(8);
/* 223 */ holder.cursor += 8;
/* 224 */
/* 225 */ // Remember the current cursor so that we can write numBytes
of key array later.
/* 226 */ final int tmpCursor1 = holder.cursor;
/* 227 */
/* 228 */
/* 229 */ if (keys instanceof UnsafeArrayData) {
/* 230 */
/* 231 */ final int sizeInBytes1 = ((UnsafeArrayData)
keys).getSizeInBytes();
/* 232 */ // grow the global buffer before writing data.
/* 233 */ holder.grow(sizeInBytes1);
/* 234 */ ((UnsafeArrayData) keys).writeToMemory(holder.buffer,
holder.cursor);
/* 235 */ holder.cursor += sizeInBytes1;
/* 236 */
/* 237 */ } else {
/* 238 */ final int numElements = keys.numElements();
/* 239 */ arrayWriter.initialize(holder, numElements, 8);
/* 240 */
/* 241 */ for (int index1 = 0; index1 < numElements; index1++) {
/* 242 */ if (keys.isNullAt(index1)) {
/* 243 */ arrayWriter.setNull(index1);
/* 244 */ } else {
/* 245 */ final UTF8String element = keys.getUTF8String(index1);
/* 246 */ arrayWriter.write(index1, element);
/* 247 */ }
/* 248 */ }
/* 249 */ }
/* 250 */
/* 251 */ // Write the numBytes of key array into the first 8 bytes.
/* 252 */ Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor
- tmpCursor1);
/* 253 */
/* 254 */
/* 255 */ if (values1 instanceof UnsafeArrayData) {
/* 256 */
/* 257 */ final int sizeInBytes3 = ((UnsafeArrayData)
values1).getSizeInBytes();
/* 258 */ // grow the global buffer before writing data.
/* 259 */ holder.grow(sizeInBytes3);
/* 260 */ ((UnsafeArrayData) values1).writeToMemory(holder.buffer,
holder.cursor);
/* 261 */ holder.cursor += sizeInBytes3;
/* 262 */
/* 263 */ } else {
/* 264 */ final int numElements1 = values1.numElements();
/* 265 */ arrayWriter1.initialize(holder, numElements1, 8);
/* 266 */
/* 267 */ for (int index2 = 0; index2 < numElements1; index2++) {
/* 268 */ if (values1.isNullAt(index2)) {
/* 269 */ arrayWriter1.setNull(index2);
/* 270 */ } else {
/* 271 */ final InternalRow element1 = values1.getStruct(index2,
1);
/* 272 */
/* 273 */ final int tmpCursor3 = holder.cursor;
/* 274 */
/* 275 */ if (element1 instanceof UnsafeRow) {
/* 276 */
/* 277 */ final int sizeInBytes2 = ((UnsafeRow)
element1).getSizeInBytes();
/* 278 */ // grow the global buffer before writing data.
/* 279 */ holder.grow(sizeInBytes2);
/* 280 */ ((UnsafeRow) element1).writeToMemory(holder.buffer,
holder.cursor);
/* 281 */ holder.cursor += sizeInBytes2;
/* 282 */
/* 283 */ } else {
/* 284 */ rowWriter1.reset();
/* 285 */
/* 286 */
/* 287 */ boolean isNull16 = element1.isNullAt(0);
/* 288 */ UTF8String value16 = isNull16 ? null :
element1.getUTF8String(0);
/* 289 */
/* 290 */ if (isNull16) {
/* 291 */ rowWriter1.setNullAt(0);
/* 292 */ } else {
/* 293 */ rowWriter1.write(0, value16);
/* 294 */ }
/* 295 */ }
/* 296 */
/* 297 */ arrayWriter1.setOffsetAndSize(index2, tmpCursor3,
holder.cursor - tmpCursor3);
/* 298 */
/* 299 */ }
/* 300 */ }
/* 301 */ }
/* 302 */
/* 303 */ }
/* 304 */
/* 305 */ rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor -
tmpCursor);
/* 306 */ }
/* 307 */ result.setTotalSize(holder.totalSize());
/* 308 */ return result;
/* 309 */ }
/* 310 */
/* 311 */
/* 312 */
/* 313 */ }
externalmaptocatalyst(ExternalMapToCatalyst_key0, ObjectType(class
java.lang.String), staticinvoke(class org.apache.spark.unsafe.types.UTF8String,
StringType, fromString, lambdavariable(ExternalMapToCatalyst_key0, false,
ObjectType(class java.lang.String), false), true),
ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0,
ObjectType(class Person), if
(isnull(lambdavariable(ExternalMapToCatalyst_value0,
ExternalMapToCatalyst_value_isNull0, ObjectType(class Person), true))) null
else named_struct(name, staticinvoke(class
org.apache.spark.unsafe.types.UTF8String, StringType, fromString,
assertnotnull(lambdavariable(ExternalMapToCatalyst_value0,
ExternalMapToCatalyst_value_isNull0, ObjectType(class Person), true)).name,
true)), assertnotnull(assertnotnull(input[0, Register, true])).register) AS
register#0
at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:291)
at org.apache.spark.sql.SparkSession$$anonfun$3.apply(SparkSession.scala:460)
at org.apache.spark.sql.SparkSession$$anonfun$3.apply(SparkSession.scala:460)
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.immutable.List.map(List.scala:285)
at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:460)
... 48 elided
Caused by: java.util.concurrent.ExecutionException:
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 61,
Column 46: failed to compile: org.codehaus.commons.compiler.CompileException:
File 'generated.java', Line 61, Column 46: Expression
"ExternalMapToCatalyst_value_isNull0" is not an rvalue
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */ return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends
org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */ private Object[] references;
/* 008 */ private java.lang.String argValue;
/* 009 */ private Object[] values;
/* 010 */ private boolean resultIsNull;
/* 011 */ private java.lang.String argValue1;
/* 012 */ private boolean isNull13;
/* 013 */ private boolean value13;
/* 014 */ private boolean isNull14;
/* 015 */ private InternalRow value14;
/* 016 */ private boolean isNull15;
/* 017 */ private InternalRow value15;
/* 018 */ private UnsafeRow result;
/* 019 */ private
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
/* 020 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
/* 021 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
/* 022 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter
arrayWriter1;
/* 023 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
/* 024 */
/* 025 */ public SpecificUnsafeProjection(Object[] references) {
/* 026 */ this.references = references;
/* 027 */
/* 028 */ values = null;
/* 029 */
/* 030 */
/* 031 */ isNull13 = false;
/* 032 */ value13 = false;
/* 033 */ isNull14 = false;
/* 034 */ value14 = null;
/* 035 */ isNull15 = false;
/* 036 */ value15 = null;
/* 037 */ result = new UnsafeRow(1);
/* 038 */ holder = new
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
/* 039 */ rowWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 040 */ arrayWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 041 */ arrayWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 042 */ rowWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 043 */
/* 044 */ }
/* 045 */
/* 046 */ public void initialize(int partitionIndex) {
/* 047 */
/* 048 */ }
/* 049 */
/* 050 */
/* 051 */ private void evalIfTrueExpr(InternalRow i) {
/* 052 */ final InternalRow value8 = null;
/* 053 */ isNull14 = true;
/* 054 */ value14 = value8;
/* 055 */ }
/* 056 */
/* 057 */
/* 058 */ private void evalIfCondExpr(InternalRow i) {
/* 059 */
/* 060 */ isNull13 = false;
/* 061 */ value13 = ExternalMapToCatalyst_value_isNull0;
/* 062 */ }
/* 063 */
/* 064 */
/* 065 */ private void evalIfFalseExpr(InternalRow i) {
/* 066 */ values = new Object[1];
/* 067 */ resultIsNull = false;
/* 068 */ if (!resultIsNull) {
/* 069 */
/* 070 */ if (ExternalMapToCatalyst_value_isNull0) {
/* 071 */ throw new NullPointerException(((java.lang.String)
references[2]));
/* 072 */ }
/* 073 */ boolean isNull11 = true;
/* 074 */ java.lang.String value11 = null;
/* 075 */ if (!false) {
/* 076 */
/* 077 */ isNull11 = false;
/* 078 */ if (!isNull11) {
/* 079 */
/* 080 */ Object funcResult1 = null;
/* 081 */ funcResult1 = ExternalMapToCatalyst_value0.name();
/* 082 */
/* 083 */ if (funcResult1 != null) {
/* 084 */ value11 = (java.lang.String) funcResult1;
/* 085 */ } else {
/* 086 */ isNull11 = true;
/* 087 */ }
/* 088 */
/* 089 */
/* 090 */ }
/* 091 */ }
/* 092 */ resultIsNull = isNull11;
/* 093 */ argValue1 = value11;
/* 094 */ }
/* 095 */
/* 096 */ boolean isNull10 = resultIsNull;
/* 097 */ final UTF8String value10 = resultIsNull ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue1);
/* 098 */ isNull10 = value10 == null;
/* 099 */ if (isNull10) {
/* 100 */ values[0] = null;
/* 101 */ } else {
/* 102 */ values[0] = value10;
/* 103 */ }
/* 104 */ final InternalRow value9 = new
org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
/* 105 */ values = null;
/* 106 */ isNull15 = false;
/* 107 */ value15 = value9;
/* 108 */ }
/* 109 */
/* 110 */
/* 111 */ // Scala.Function1 need this
/* 112 */ public java.lang.Object apply(java.lang.Object row) {
/* 113 */ return apply((InternalRow) row);
/* 114 */ }
/* 115 */
/* 116 */ public UnsafeRow apply(InternalRow i) {
/* 117 */ holder.reset();
/* 118 */
/* 119 */ rowWriter.zeroOutNullBytes();
/* 120 */
/* 121 */
/* 122 */ boolean isNull4 = i.isNullAt(0);
/* 123 */ Register value4 = isNull4 ? null : ((Register)i.get(0, null));
/* 124 */
/* 125 */ if (isNull4) {
/* 126 */ throw new NullPointerException(((java.lang.String)
references[0]));
/* 127 */ }
/* 128 */
/* 129 */ if (false) {
/* 130 */ throw new NullPointerException(((java.lang.String)
references[1]));
/* 131 */ }
/* 132 */ boolean isNull1 = true;
/* 133 */ scala.collection.immutable.Map value1 = null;
/* 134 */ if (!false) {
/* 135 */
/* 136 */ isNull1 = false;
/* 137 */ if (!isNull1) {
/* 138 */
/* 139 */ Object funcResult = null;
/* 140 */ funcResult = value4.register();
/* 141 */
/* 142 */ if (funcResult != null) {
/* 143 */ value1 = (scala.collection.immutable.Map) funcResult;
/* 144 */ } else {
/* 145 */ isNull1 = true;
/* 146 */ }
/* 147 */
/* 148 */
/* 149 */ }
/* 150 */ }
/* 151 */ MapData value = null;
/* 152 */ if (!isNull1) {
/* 153 */ final int length = value1.size();
/* 154 */ final Object[] convertedKeys = new Object[length];
/* 155 */ final Object[] convertedValues = new Object[length];
/* 156 */ int index = 0;
/* 157 */ final scala.collection.Iterator entries = value1.iterator();
/* 158 */ while(entries.hasNext()) {
/* 159 */
/* 160 */ final scala.Tuple2 entry = (scala.Tuple2) entries.next();
/* 161 */ java.lang.String ExternalMapToCatalyst_key0 =
(java.lang.String) entry._1();
/* 162 */ Person ExternalMapToCatalyst_value0 = (Person) entry._2();
/* 163 */
/* 164 */ boolean ExternalMapToCatalyst_value_isNull0 =
ExternalMapToCatalyst_value0 == null;
/* 165 */
/* 166 */
/* 167 */ argValue = ExternalMapToCatalyst_key0;
/* 168 */
/* 169 */ boolean isNull5 = false;
/* 170 */ final UTF8String value5 = false ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
/* 171 */ isNull5 = value5 == null;
/* 172 */ if (isNull5) {
/* 173 */ throw new RuntimeException("Cannot use null as map key!");
/* 174 */ } else {
/* 175 */ convertedKeys[index] = (UTF8String) value5;
/* 176 */ }
/* 177 */
/* 178 */
/* 179 */ evalIfCondExpr(i);
/* 180 */ boolean isNull6 = false;
/* 181 */ InternalRow value6 = null;
/* 182 */ if (!isNull13 && value13) {
/* 183 */ evalIfTrueExpr(i);
/* 184 */ isNull6 = isNull14;
/* 185 */ value6 = value14;
/* 186 */ } else {
/* 187 */ evalIfFalseExpr(i);
/* 188 */ isNull6 = isNull15;
/* 189 */ value6 = value15;
/* 190 */ }
/* 191 */ if (isNull6) {
/* 192 */ convertedValues[index] = null;
/* 193 */ } else {
/* 194 */ convertedValues[index] = (InternalRow) value6;
/* 195 */ }
/* 196 */
/* 197 */ index++;
/* 198 */ }
/* 199 */
/* 200 */ value = new
org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
/* 201 */ }
/* 202 */ if (isNull1) {
/* 203 */ rowWriter.setNullAt(0);
/* 204 */ } else {
/* 205 */ // Remember the current cursor so that we can calculate how
many bytes are
/* 206 */ // written later.
/* 207 */ final int tmpCursor = holder.cursor;
/* 208 */
/* 209 */ if (value instanceof UnsafeMapData) {
/* 210 */
/* 211 */ final int sizeInBytes = ((UnsafeMapData)
value).getSizeInBytes();
/* 212 */ // grow the global buffer before writing data.
/* 213 */ holder.grow(sizeInBytes);
/* 214 */ ((UnsafeMapData) value).writeToMemory(holder.buffer,
holder.cursor);
/* 215 */ holder.cursor += sizeInBytes;
/* 216 */
/* 217 */ } else {
/* 218 */ final ArrayData keys = value.keyArray();
/* 219 */ final ArrayData values1 = value.valueArray();
/* 220 */
/* 221 */ // preserve 8 bytes to write the key array numBytes later.
/* 222 */ holder.grow(8);
/* 223 */ holder.cursor += 8;
/* 224 */
/* 225 */ // Remember the current cursor so that we can write numBytes
of key array later.
/* 226 */ final int tmpCursor1 = holder.cursor;
/* 227 */
/* 228 */
/* 229 */ if (keys instanceof UnsafeArrayData) {
/* 230 */
/* 231 */ final int sizeInBytes1 = ((UnsafeArrayData)
keys).getSizeInBytes();
/* 232 */ // grow the global buffer before writing data.
/* 233 */ holder.grow(sizeInBytes1);
/* 234 */ ((UnsafeArrayData) keys).writeToMemory(holder.buffer,
holder.cursor);
/* 235 */ holder.cursor += sizeInBytes1;
/* 236 */
/* 237 */ } else {
/* 238 */ final int numElements = keys.numElements();
/* 239 */ arrayWriter.initialize(holder, numElements, 8);
/* 240 */
/* 241 */ for (int index1 = 0; index1 < numElements; index1++) {
/* 242 */ if (keys.isNullAt(index1)) {
/* 243 */ arrayWriter.setNull(index1);
/* 244 */ } else {
/* 245 */ final UTF8String element = keys.getUTF8String(index1);
/* 246 */ arrayWriter.write(index1, element);
/* 247 */ }
/* 248 */ }
/* 249 */ }
/* 250 */
/* 251 */ // Write the numBytes of key array into the first 8 bytes.
/* 252 */ Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor
- tmpCursor1);
/* 253 */
/* 254 */
/* 255 */ if (values1 instanceof UnsafeArrayData) {
/* 256 */
/* 257 */ final int sizeInBytes3 = ((UnsafeArrayData)
values1).getSizeInBytes();
/* 258 */ // grow the global buffer before writing data.
/* 259 */ holder.grow(sizeInBytes3);
/* 260 */ ((UnsafeArrayData) values1).writeToMemory(holder.buffer,
holder.cursor);
/* 261 */ holder.cursor += sizeInBytes3;
/* 262 */
/* 263 */ } else {
/* 264 */ final int numElements1 = values1.numElements();
/* 265 */ arrayWriter1.initialize(holder, numElements1, 8);
/* 266 */
/* 267 */ for (int index2 = 0; index2 < numElements1; index2++) {
/* 268 */ if (values1.isNullAt(index2)) {
/* 269 */ arrayWriter1.setNull(index2);
/* 270 */ } else {
/* 271 */ final InternalRow element1 = values1.getStruct(index2,
1);
/* 272 */
/* 273 */ final int tmpCursor3 = holder.cursor;
/* 274 */
/* 275 */ if (element1 instanceof UnsafeRow) {
/* 276 */
/* 277 */ final int sizeInBytes2 = ((UnsafeRow)
element1).getSizeInBytes();
/* 278 */ // grow the global buffer before writing data.
/* 279 */ holder.grow(sizeInBytes2);
/* 280 */ ((UnsafeRow) element1).writeToMemory(holder.buffer,
holder.cursor);
/* 281 */ holder.cursor += sizeInBytes2;
/* 282 */
/* 283 */ } else {
/* 284 */ rowWriter1.reset();
/* 285 */
/* 286 */
/* 287 */ boolean isNull16 = element1.isNullAt(0);
/* 288 */ UTF8String value16 = isNull16 ? null :
element1.getUTF8String(0);
/* 289 */
/* 290 */ if (isNull16) {
/* 291 */ rowWriter1.setNullAt(0);
/* 292 */ } else {
/* 293 */ rowWriter1.write(0, value16);
/* 294 */ }
/* 295 */ }
/* 296 */
/* 297 */ arrayWriter1.setOffsetAndSize(index2, tmpCursor3,
holder.cursor - tmpCursor3);
/* 298 */
/* 299 */ }
/* 300 */ }
/* 301 */ }
/* 302 */
/* 303 */ }
/* 304 */
/* 305 */ rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor -
tmpCursor);
/* 306 */ }
/* 307 */ result.setTotalSize(holder.totalSize());
/* 308 */ return result;
/* 309 */ }
/* 310 */
/* 311 */
/* 312 */
/* 313 */ }
at
org.spark_project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
at
org.spark_project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
at
org.spark_project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
at
org.spark_project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
at
org.spark_project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
at
org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
at
org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
at
org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1000)
at
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:415)
at
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:366)
at
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:984)
at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:264)
at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:264)
at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:288)
... 56 more
Caused by: org.codehaus.commons.compiler.CompileException: File
'generated.java', Line 61, Column 46: failed to compile:
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 61,
Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */ return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends
org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */ private Object[] references;
/* 008 */ private java.lang.String argValue;
/* 009 */ private Object[] values;
/* 010 */ private boolean resultIsNull;
/* 011 */ private java.lang.String argValue1;
/* 012 */ private boolean isNull13;
/* 013 */ private boolean value13;
/* 014 */ private boolean isNull14;
/* 015 */ private InternalRow value14;
/* 016 */ private boolean isNull15;
/* 017 */ private InternalRow value15;
/* 018 */ private UnsafeRow result;
/* 019 */ private
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
/* 020 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
/* 021 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
/* 022 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter
arrayWriter1;
/* 023 */ private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
/* 024 */
/* 025 */ public SpecificUnsafeProjection(Object[] references) {
/* 026 */ this.references = references;
/* 027 */
/* 028 */ values = null;
/* 029 */
/* 030 */
/* 031 */ isNull13 = false;
/* 032 */ value13 = false;
/* 033 */ isNull14 = false;
/* 034 */ value14 = null;
/* 035 */ isNull15 = false;
/* 036 */ value15 = null;
/* 037 */ result = new UnsafeRow(1);
/* 038 */ holder = new
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
/* 039 */ rowWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 040 */ arrayWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 041 */ arrayWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 042 */ rowWriter1 = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 043 */
/* 044 */ }
/* 045 */
/* 046 */ public void initialize(int partitionIndex) {
/* 047 */
/* 048 */ }
/* 049 */
/* 050 */
/* 051 */ private void evalIfTrueExpr(InternalRow i) {
/* 052 */ final InternalRow value8 = null;
/* 053 */ isNull14 = true;
/* 054 */ value14 = value8;
/* 055 */ }
/* 056 */
/* 057 */
/* 058 */ private void evalIfCondExpr(InternalRow i) {
/* 059 */
/* 060 */ isNull13 = false;
/* 061 */ value13 = ExternalMapToCatalyst_value_isNull0;
/* 062 */ }
/* 063 */
/* 064 */
/* 065 */ private void evalIfFalseExpr(InternalRow i) {
/* 066 */ values = new Object[1];
/* 067 */ resultIsNull = false;
/* 068 */ if (!resultIsNull) {
/* 069 */
/* 070 */ if (ExternalMapToCatalyst_value_isNull0) {
/* 071 */ throw new NullPointerException(((java.lang.String)
references[2]));
/* 072 */ }
/* 073 */ boolean isNull11 = true;
/* 074 */ java.lang.String value11 = null;
/* 075 */ if (!false) {
/* 076 */
/* 077 */ isNull11 = false;
/* 078 */ if (!isNull11) {
/* 079 */
/* 080 */ Object funcResult1 = null;
/* 081 */ funcResult1 = ExternalMapToCatalyst_value0.name();
/* 082 */
/* 083 */ if (funcResult1 != null) {
/* 084 */ value11 = (java.lang.String) funcResult1;
/* 085 */ } else {
/* 086 */ isNull11 = true;
/* 087 */ }
/* 088 */
/* 089 */
/* 090 */ }
/* 091 */ }
/* 092 */ resultIsNull = isNull11;
/* 093 */ argValue1 = value11;
/* 094 */ }
/* 095 */
/* 096 */ boolean isNull10 = resultIsNull;
/* 097 */ final UTF8String value10 = resultIsNull ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue1);
/* 098 */ isNull10 = value10 == null;
/* 099 */ if (isNull10) {
/* 100 */ values[0] = null;
/* 101 */ } else {
/* 102 */ values[0] = value10;
/* 103 */ }
/* 104 */ final InternalRow value9 = new
org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
/* 105 */ values = null;
/* 106 */ isNull15 = false;
/* 107 */ value15 = value9;
/* 108 */ }
/* 109 */
/* 110 */
/* 111 */ // Scala.Function1 need this
/* 112 */ public java.lang.Object apply(java.lang.Object row) {
/* 113 */ return apply((InternalRow) row);
/* 114 */ }
/* 115 */
/* 116 */ public UnsafeRow apply(InternalRow i) {
/* 117 */ holder.reset();
/* 118 */
/* 119 */ rowWriter.zeroOutNullBytes();
/* 120 */
/* 121 */
/* 122 */ boolean isNull4 = i.isNullAt(0);
/* 123 */ Register value4 = isNull4 ? null : ((Register)i.get(0, null));
/* 124 */
/* 125 */ if (isNull4) {
/* 126 */ throw new NullPointerException(((java.lang.String)
references[0]));
/* 127 */ }
/* 128 */
/* 129 */ if (false) {
/* 130 */ throw new NullPointerException(((java.lang.String)
references[1]));
/* 131 */ }
/* 132 */ boolean isNull1 = true;
/* 133 */ scala.collection.immutable.Map value1 = null;
/* 134 */ if (!false) {
/* 135 */
/* 136 */ isNull1 = false;
/* 137 */ if (!isNull1) {
/* 138 */
/* 139 */ Object funcResult = null;
/* 140 */ funcResult = value4.register();
/* 141 */
/* 142 */ if (funcResult != null) {
/* 143 */ value1 = (scala.collection.immutable.Map) funcResult;
/* 144 */ } else {
/* 145 */ isNull1 = true;
/* 146 */ }
/* 147 */
/* 148 */
/* 149 */ }
/* 150 */ }
/* 151 */ MapData value = null;
/* 152 */ if (!isNull1) {
/* 153 */ final int length = value1.size();
/* 154 */ final Object[] convertedKeys = new Object[length];
/* 155 */ final Object[] convertedValues = new Object[length];
/* 156 */ int index = 0;
/* 157 */ final scala.collection.Iterator entries = value1.iterator();
/* 158 */ while(entries.hasNext()) {
/* 159 */
/* 160 */ final scala.Tuple2 entry = (scala.Tuple2) entries.next();
/* 161 */ java.lang.String ExternalMapToCatalyst_key0 =
(java.lang.String) entry._1();
/* 162 */ Person ExternalMapToCatalyst_value0 = (Person) entry._2();
/* 163 */
/* 164 */ boolean ExternalMapToCatalyst_value_isNull0 =
ExternalMapToCatalyst_value0 == null;
/* 165 */
/* 166 */
/* 167 */ argValue = ExternalMapToCatalyst_key0;
/* 168 */
/* 169 */ boolean isNull5 = false;
/* 170 */ final UTF8String value5 = false ? null :
org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
/* 171 */ isNull5 = value5 == null;
/* 172 */ if (isNull5) {
/* 173 */ throw new RuntimeException("Cannot use null as map key!");
/* 174 */ } else {
/* 175 */ convertedKeys[index] = (UTF8String) value5;
/* 176 */ }
/* 177 */
/* 178 */
/* 179 */ evalIfCondExpr(i);
/* 180 */ boolean isNull6 = false;
/* 181 */ InternalRow value6 = null;
/* 182 */ if (!isNull13 && value13) {
/* 183 */ evalIfTrueExpr(i);
/* 184 */ isNull6 = isNull14;
/* 185 */ value6 = value14;
/* 186 */ } else {
/* 187 */ evalIfFalseExpr(i);
/* 188 */ isNull6 = isNull15;
/* 189 */ value6 = value15;
/* 190 */ }
/* 191 */ if (isNull6) {
/* 192 */ convertedValues[index] = null;
/* 193 */ } else {
/* 194 */ convertedValues[index] = (InternalRow) value6;
/* 195 */ }
/* 196 */
/* 197 */ index++;
/* 198 */ }
/* 199 */
/* 200 */ value = new
org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new
org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
/* 201 */ }
/* 202 */ if (isNull1) {
/* 203 */ rowWriter.setNullAt(0);
/* 204 */ } else {
/* 205 */ // Remember the current cursor so that we can calculate how
many bytes are
/* 206 */ // written later.
/* 207 */ final int tmpCursor = holder.cursor;
/* 208 */
/* 209 */ if (value instanceof UnsafeMapData) {
/* 210 */
/* 211 */ final int sizeInBytes = ((UnsafeMapData)
value).getSizeInBytes();
/* 212 */ // grow the global buffer before writing data.
/* 213 */ holder.grow(sizeInBytes);
/* 214 */ ((UnsafeMapData) value).writeToMemory(holder.buffer,
holder.cursor);
/* 215 */ holder.cursor += sizeInBytes;
/* 216 */
/* 217 */ } else {
/* 218 */ final ArrayData keys = value.keyArray();
/* 219 */ final ArrayData values1 = value.valueArray();
/* 220 */
/* 221 */ // preserve 8 bytes to write the key array numBytes later.
/* 222 */ holder.grow(8);
/* 223 */ holder.cursor += 8;
/* 224 */
/* 225 */ // Remember the current cursor so that we can write numBytes
of key array later.
/* 226 */ final int tmpCursor1 = holder.cursor;
/* 227 */
/* 228 */
/* 229 */ if (keys instanceof UnsafeArrayData) {
/* 230 */
/* 231 */ final int sizeInBytes1 = ((UnsafeArrayData)
keys).getSizeInBytes();
/* 232 */ // grow the global buffer before writing data.
/* 233 */ holder.grow(sizeInBytes1);
/* 234 */ ((UnsafeArrayData) keys).writeToMemory(holder.buffer,
holder.cursor);
/* 235 */ holder.cursor += sizeInBytes1;
/* 236 */
/* 237 */ } else {
/* 238 */ final int numElements = keys.numElements();
/* 239 */ arrayWriter.initialize(holder, numElements, 8);
/* 240 */
/* 241 */ for (int index1 = 0; index1 < numElements; index1++) {
/* 242 */ if (keys.isNullAt(index1)) {
/* 243 */ arrayWriter.setNull(index1);
/* 244 */ } else {
/* 245 */ final UTF8String element = keys.getUTF8String(index1);
/* 246 */ arrayWriter.write(index1, element);
/* 247 */ }
/* 248 */ }
/* 249 */ }
/* 250 */
/* 251 */ // Write the numBytes of key array into the first 8 bytes.
/* 252 */ Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor
- tmpCursor1);
/* 253 */
/* 254 */
/* 255 */ if (values1 instanceof UnsafeArrayData) {
/* 256 */
/* 257 */ final int sizeInBytes3 = ((UnsafeArrayData)
values1).getSizeInBytes();
/* 258 */ // grow the global buffer before writing data.
/* 259 */ holder.grow(sizeInBytes3);
/* 260 */ ((UnsafeArrayData) values1).writeToMemory(holder.buffer,
holder.cursor);
/* 261 */ holder.cursor += sizeInBytes3;
/* 262 */
/* 263 */ } else {
/* 264 */ final int numElements1 = values1.numElements();
/* 265 */ arrayWriter1.initialize(holder, numElements1, 8);
/* 266 */
/* 267 */ for (int index2 = 0; index2 < numElements1; index2++) {
/* 268 */ if (values1.isNullAt(index2)) {
/* 269 */ arrayWriter1.setNull(index2);
/* 270 */ } else {
/* 271 */ final InternalRow element1 = values1.getStruct(index2,
1);
/* 272 */
/* 273 */ final int tmpCursor3 = holder.cursor;
/* 274 */
/* 275 */ if (element1 instanceof UnsafeRow) {
/* 276 */
/* 277 */ final int sizeInBytes2 = ((UnsafeRow)
element1).getSizeInBytes();
/* 278 */ // grow the global buffer before writing data.
/* 279 */ holder.grow(sizeInBytes2);
/* 280 */ ((UnsafeRow) element1).writeToMemory(holder.buffer,
holder.cursor);
/* 281 */ holder.cursor += sizeInBytes2;
/* 282 */
/* 283 */ } else {
/* 284 */ rowWriter1.reset();
/* 285 */
/* 286 */
/* 287 */ boolean isNull16 = element1.isNullAt(0);
/* 288 */ UTF8String value16 = isNull16 ? null :
element1.getUTF8String(0);
/* 289 */
/* 290 */ if (isNull16) {
/* 291 */ rowWriter1.setNullAt(0);
/* 292 */ } else {
/* 293 */ rowWriter1.write(0, value16);
/* 294 */ }
/* 295 */ }
/* 296 */
/* 297 */ arrayWriter1.setOffsetAndSize(index2, tmpCursor3,
holder.cursor - tmpCursor3);
/* 298 */
/* 299 */ }
/* 300 */ }
/* 301 */ }
/* 302 */
/* 303 */ }
/* 304 */
/* 305 */ rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor -
tmpCursor);
/* 306 */ }
/* 307 */ result.setTotalSize(holder.totalSize());
/* 308 */ return result;
/* 309 */ }
/* 310 */
/* 311 */
/* 312 */
/* 313 */ }
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1064)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1121)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1118)
at
org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
at
org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
... 69 more
{code}
{{ExternalMapToCatalyst_value_isNull0}} is only defined locally; however, nested
records (here _Person_) try to refer to it. So it probably needs to become a
field of the generated class instead of a local variable.
We originally bumped into this problem in Java using the Java bean encoder, so
this problem is not Scala-specific.
We were able to reproduce it on Spark 2.1.0 and on a 2.2.0 nightly build.
--
This message was sent by Atlassian JIRA
(v6.4.14#64029)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]