Sean Owen created SPARK-25029:
---------------------------------

             Summary: Scala 2.12 issues: TaskNotSerializable and Janino "Two non-abstract methods ..." errors
                 Key: SPARK-25029
                 URL: https://issues.apache.org/jira/browse/SPARK-25029
             Project: Spark
          Issue Type: Bug
          Components: Build
    Affects Versions: 2.4.0
            Reporter: Sean Owen


We still have some test failures in the Scala 2.12 build. There seem to be two 
types. The first is that some tests fail with "TaskNotSerializable" because 
some code construct now captures a reference to scalatest's AssertionsHelper. 
Example:
{code:java}
- LegacyAccumulatorWrapper with AccumulatorParam that has no equals/hashCode *** FAILED ***
  java.io.NotSerializableException: org.scalatest.Assertions$AssertionsHelper
  Serialization stack:
    - object not serializable (class: org.scalatest.Assertions$AssertionsHelper, value: org.scalatest.Assertions$AssertionsHelper@3bc5fc8f){code}
These seem generally easy to fix by tweaking the test code. It's not clear 
whether closure cleaning in 2.12 could be improved to detect this situation 
automatically, but given that only a handful of tests fail for this reason, 
it's unlikely to be a systemic problem.
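
For illustration, here is a minimal, hypothetical sketch (the suite, test name, and values are made up, not taken from the failing tests) of the capture pattern and the kind of tweak that avoids it: referencing scalatest's {{assert}} inside a task closure can pull in the test instance and its non-serializable AssertionsHelper, while capturing only local vals and asserting on the driver does not.
{code:scala}
import org.apache.spark.sql.SparkSession
import org.scalatest.FunSuite

// Hypothetical example suite, not part of Spark's test code.
class CaptureSketchSuite extends FunSuite {
  test("avoid capturing the test class in a task closure") {
    val spark = SparkSession.builder().master("local[2]").appName("sketch").getOrCreate()
    try {
      val expected = 10 // a plain local val is safe to capture

      // BAD: calling scalatest's `assert` inside the task closure can capture `this`
      // (and its AssertionsHelper) under Scala 2.12:
      // spark.sparkContext.parallelize(1 to 10).foreach(i => assert(i <= expected))

      // OK: do the distributed work first, then assert on the driver.
      val count = spark.sparkContext.parallelize(1 to 10).filter(_ <= expected).count()
      assert(count == 10L)
    } finally {
      spark.stop()
    }
  }
}
{code}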

 

The other error is more curious. Janino fails to compile generated code in many 
cases, with errors like:
{code:java}
- encode/decode for seq of string: List(abc, xyz) *** FAILED ***
  java.lang.RuntimeException: Error while encoding: org.codehaus.janino.InternalCompilerException: failed to compile: org.codehaus.janino.InternalCompilerException: Compiling "GeneratedClass": Two non-abstract methods "public int scala.collection.TraversableOnce.size()" have the same parameter types, declaring type and return type{code}
 

The full stack trace and the generated code that failed in one case are 
included below. The generated code doesn't define a {{size()}} method of its 
own; it only calls {{size()}} on a {{scala.collection.immutable.Map}}. It's got 
to be down to some difference in Scala 2.12, potentially even a Janino problem.
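
For context, here is a hypothetical, minimal driver (not part of Spark or of this ticket) that asks Janino to resolve the same kind of call the generated {{ExternalMapToCatalyst_*}} methods make, i.e. {{size()}} on a {{scala.collection.immutable.Map}}. Whether it actually reproduces the "Two non-abstract methods" error will depend on the Janino version and on having the Scala 2.12 {{scala-library}} on the classpath.
{code:scala}
import org.codehaus.janino.ClassBodyEvaluator

// Hypothetical reproduction sketch: compile a tiny class body with Janino that
// calls size() on a Scala 2.12 immutable Map, mirroring the failing pattern.
object JaninoMapSizeSketch {
  def main(args: Array[String]): Unit = {
    val evaluator = new ClassBodyEvaluator()
    evaluator.cook(
      """public int callSize(scala.collection.immutable.Map m) {
        |  return m.size();
        |}
        |""".stripMargin)
    // If method resolution succeeds, the compiled class is available here.
    println("compiled: " + evaluator.getClazz)
  }
}
{code}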

 
{code:java}
Caused by: org.codehaus.janino.InternalCompilerException: Compiling "GeneratedClass": Two non-abstract methods "public int scala.collection.TraversableOnce.size()" have the same parameter types, declaring type and return type
  at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:361)
  at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:234)
  at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:446)
  at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:313)
  at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:235)
  at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:204)
  at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80)
  at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1342)
  ... 30 more
Caused by: org.codehaus.janino.InternalCompilerException: Two non-abstract methods "public int scala.collection.TraversableOnce.size()" have the same parameter types, declaring type and return type
  at org.codehaus.janino.UnitCompiler.findMostSpecificIInvocable(UnitCompiler.java:9112)
  at org.codehaus.janino.UnitCompiler.findMostSpecificIInvocable(UnitCompiler.java:8888)
  at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:8770)
  at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:8672)
  at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:4737)
  at org.codehaus.janino.UnitCompiler.access$8300(UnitCompiler.java:212)
  at org.codehaus.janino.UnitCompiler$12.visitMethodInvocation(UnitCompiler.java:4097)
  at org.codehaus.janino.UnitCompiler$12.visitMethodInvocation(UnitCompiler.java:4070)
  at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:4902)
  at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:4070)
  at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:5253)
  at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:4391)
  at org.codehaus.janino.UnitCompiler.access$8000(UnitCompiler.java:212)
  at org.codehaus.janino.UnitCompiler$12.visitConditionalExpression(UnitCompiler.java:4094)
  at org.codehaus.janino.UnitCompiler$12.visitConditionalExpression(UnitCompiler.java:4070)
  at org.codehaus.janino.Java$ConditionalExpression.accept(Java.java:4344)
  at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:4070)
  at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:5253)
  at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2559)
  at org.codehaus.janino.UnitCompiler.access$2700(UnitCompiler.java:212)
  at org.codehaus.janino.UnitCompiler$6.visitLocalVariableDeclarationStatement(UnitCompiler.java:1482)
  at org.codehaus.janino.UnitCompiler$6.visitLocalVariableDeclarationStatement(UnitCompiler.java:1466)
  at org.codehaus.janino.Java$LocalVariableDeclarationStatement.accept(Java.java:3351)
  ...{code}
 

 
{code:java}
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */ return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */ private Object[] references;
/* 008 */ private scala.collection.immutable.Map MapObjects_loopValue140;
/* 009 */ private int ExternalMapToCatalyst_key8;
/* 010 */ private int ExternalMapToCatalyst_value8;
/* 011 */ private boolean globalIsNull_0;
/* 012 */ private boolean MapObjects_loopIsNull140;
/* 013 */ private boolean globalIsNull_1;
/* 014 */ private boolean globalIsNull_2;
/* 015 */ private boolean globalIsNull_3;
/* 016 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];
/* 017 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[6];
/* 018 */
/* 019 */ public SpecificUnsafeProjection(Object[] references) {
/* 020 */ this.references = references;
/* 021 */
/* 022 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 64);
/* 023 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 8);
/* 024 */ mutableStateArray_1[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_1[0], 4);
/* 025 */ mutableStateArray_1[2] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_1[0], 4);
/* 026 */ mutableStateArray_1[3] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 8);
/* 027 */ mutableStateArray_1[4] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_1[3], 4);
/* 028 */ mutableStateArray_1[5] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_1[3], 4);
/* 029 */
/* 030 */ }
/* 031 */
/* 032 */ public void initialize(int partitionIndex) {
/* 033 */
/* 034 */ }
/* 035 */
/* 036 */ // Scala.Function1 need this
/* 037 */ public java.lang.Object apply(java.lang.Object row) {
/* 038 */ return apply((InternalRow) row);
/* 039 */ }
/* 040 */
/* 041 */ public UnsafeRow apply(InternalRow i) {
/* 042 */ mutableStateArray_0[0].reset();
/* 043 */
/* 044 */
/* 045 */ mutableStateArray_0[0].zeroOutNullBytes();
/* 046 */ writeFields_0_0(i);
/* 047 */ writeFields_0_1(i);
/* 048 */ return (mutableStateArray_0[0].getRow());
/* 049 */ }
/* 050 */
/* 051 */
/* 052 */ private void writeFields_0_1(InternalRow i) {
/* 053 */
/* 054 */ ArrayData value_9 = MapObjects_1(i);
/* 055 */ if (globalIsNull_3) {
/* 056 */ mutableStateArray_0[0].setNullAt(1);
/* 057 */ } else {
/* 058 */ // Remember the current cursor so that we can calculate how many bytes are
/* 059 */ // written later.
/* 060 */ final int previousCursor_2 = mutableStateArray_0[0].cursor();
/* 061 */
/* 062 */ final ArrayData tmpInput_4 = value_9;
/* 063 */ if (tmpInput_4 instanceof UnsafeArrayData) {
/* 064 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_4);
/* 065 */ } else {
/* 066 */ final int numElements_3 = tmpInput_4.numElements();
/* 067 */ mutableStateArray_1[3].initialize(numElements_3);
/* 068 */
/* 069 */ for (int index_5 = 0; index_5 < numElements_3; index_5++) {
/* 070 */ if (tmpInput_4.isNullAt(index_5)) {
/* 071 */ mutableStateArray_1[3].setNull8Bytes(index_5);
/* 072 */ } else {
/* 073 */
/* 074 */ final MapData tmpInput_5 = tmpInput_4.getMap(index_5);
/* 075 */ if (tmpInput_5 instanceof UnsafeMapData) {
/* 076 */ mutableStateArray_1[3].write(index_5, (UnsafeMapData) tmpInput_5);
/* 077 */ } else {
/* 078 */ // Remember the current cursor so that we can calculate how many bytes are
/* 079 */ // written later.
/* 080 */ final int previousCursor_3 = mutableStateArray_1[3].cursor();
/* 081 */
/* 082 */ // preserve 8 bytes to write the key array numBytes later.
/* 083 */ mutableStateArray_1[3].grow(8);
/* 084 */ mutableStateArray_1[3].increaseCursor(8);
/* 085 */
/* 086 */ // Remember the current cursor so that we can write numBytes of key array later.
/* 087 */ final int tmpCursor_1 = mutableStateArray_1[3].cursor();
/* 088 */
/* 089 */
/* 090 */ final ArrayData tmpInput_6 = tmpInput_5.keyArray();
/* 091 */ if (tmpInput_6 instanceof UnsafeArrayData) {
/* 092 */ mutableStateArray_1[3].write((UnsafeArrayData) tmpInput_6);
/* 093 */ } else {
/* 094 */ final int numElements_4 = tmpInput_6.numElements();
/* 095 */ mutableStateArray_1[4].initialize(numElements_4);
/* 096 */
/* 097 */ for (int index_6 = 0; index_6 < numElements_4; index_6++) {
/* 098 */ if (tmpInput_6.isNullAt(index_6)) {
/* 099 */ mutableStateArray_1[4].setNull4Bytes(index_6);
/* 100 */ } else {
/* 101 */ mutableStateArray_1[4].write(index_6, tmpInput_6.getInt(index_6));
/* 102 */ }
/* 103 */ }
/* 104 */ }
/* 105 */
/* 106 */
/* 107 */ // Write the numBytes of key array into the first 8 bytes.
/* 108 */ Platform.putLong(
/* 109 */ mutableStateArray_1[3].getBuffer(),
/* 110 */ tmpCursor_1 - 8,
/* 111 */ mutableStateArray_1[3].cursor() - tmpCursor_1);
/* 112 */
/* 113 */
/* 114 */ final ArrayData tmpInput_7 = tmpInput_5.valueArray();
/* 115 */ if (tmpInput_7 instanceof UnsafeArrayData) {
/* 116 */ mutableStateArray_1[3].write((UnsafeArrayData) tmpInput_7);
/* 117 */ } else {
/* 118 */ final int numElements_5 = tmpInput_7.numElements();
/* 119 */ mutableStateArray_1[5].initialize(numElements_5);
/* 120 */
/* 121 */ for (int index_7 = 0; index_7 < numElements_5; index_7++) {
/* 122 */ if (tmpInput_7.isNullAt(index_7)) {
/* 123 */ mutableStateArray_1[5].setNull4Bytes(index_7);
/* 124 */ } else {
/* 125 */ mutableStateArray_1[5].write(index_7, tmpInput_7.getInt(index_7));
/* 126 */ }
/* 127 */ }
/* 128 */ }
/* 129 */
/* 130 */ mutableStateArray_1[3].setOffsetAndSizeFromPreviousCursor(index_5, previousCursor_3);
/* 131 */ }
/* 132 */
/* 133 */ }
/* 134 */ }
/* 135 */ }
/* 136 */
/* 137 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(1, previousCursor_2);
/* 138 */ }
/* 139 */
/* 140 */ }
/* 141 */
/* 142 */
/* 143 */ private MapData ExternalMapToCatalyst_1(InternalRow i) {
/* 144 */ MapData value_7 = null;
/* 145 */ if (!MapObjects_loopIsNull140) {
/* 146 */ final int length_1 = MapObjects_loopValue140.size();
/* 147 */ final Object[] convertedKeys_1 = new Object[length_1];
/* 148 */ final Object[] convertedValues_1 = new Object[length_1];
/* 149 */ int index_1 = 0;
/* 150 */ final scala.collection.Iterator entries_1 = MapObjects_loopValue140.iterator();
/* 151 */ while(entries_1.hasNext()) {
/* 152 */
/* 153 */ final scala.Tuple2 entry_1 = (scala.Tuple2) entries_1.next();
/* 154 */ ExternalMapToCatalyst_key8 = (Integer) entry_1._1();
/* 155 */ ExternalMapToCatalyst_value8 = (Integer) entry_1._2();
/* 156 */
/* 157 */
/* 158 */
/* 159 */
/* 160 */
/* 161 */ if (false) {
/* 162 */ throw new RuntimeException("Cannot use null as map key!");
/* 163 */ } else {
/* 164 */ convertedKeys_1[index_1] = (Integer) ExternalMapToCatalyst_key8;
/* 165 */ }
/* 166 */
/* 167 */
/* 168 */ if (false) {
/* 169 */ convertedValues_1[index_1] = null;
/* 170 */ } else {
/* 171 */ convertedValues_1[index_1] = (Integer) ExternalMapToCatalyst_value8;
/* 172 */ }
/* 173 */
/* 174 */ index_1++;
/* 175 */ }
/* 176 */
/* 177 */ value_7 = new org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys_1), new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues_1));
/* 178 */ }
/* 179 */ globalIsNull_2 = MapObjects_loopIsNull140;
/* 180 */ return value_7;
/* 181 */ }
/* 182 */
/* 183 */
/* 184 */ private ArrayData MapObjects_1(InternalRow i) {
/* 185 */ boolean isNull_4 = i.isNullAt(0);
/* 186 */ scala.collection.immutable.Map[] value_6 = isNull_4 ?
/* 187 */ null : ((scala.collection.immutable.Map[])i.get(0, null));
/* 188 */ ArrayData value_5 = null;
/* 189 */
/* 190 */ if (!isNull_4) {
/* 191 */
/* 192 */ int dataLength_1 = value_6.length;
/* 193 */
/* 194 */ MapData[] convertedArray_1 = null;
/* 195 */ convertedArray_1 = new MapData[dataLength_1];
/* 196 */
/* 197 */
/* 198 */ int loopIndex_1 = 0;
/* 199 */
/* 200 */ while (loopIndex_1 < dataLength_1) {
/* 201 */ MapObjects_loopValue140 = (scala.collection.immutable.Map) (value_6[loopIndex_1]);
/* 202 */ MapObjects_loopIsNull140 = MapObjects_loopValue140 == null;
/* 203 */
/* 204 */ MapData value_8 = ExternalMapToCatalyst_1(i);
/* 205 */ if (globalIsNull_2) {
/* 206 */ convertedArray_1[loopIndex_1] = null;
/* 207 */ } else {
/* 208 */ convertedArray_1[loopIndex_1] = value_8 instanceof UnsafeMapData? value_8.copy() : value_8;
/* 209 */ }
/* 210 */
/* 211 */ loopIndex_1 += 1;
/* 212 */ }
/* 213 */
/* 214 */ value_5 = new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedArray_1);
/* 215 */ }
/* 216 */ globalIsNull_3 = isNull_4;
/* 217 */ return value_5;
/* 218 */ }
/* 219 */
/* 220 */
/* 221 */ private void writeFields_0_0(InternalRow i) {
/* 222 */
/* 223 */ ArrayData value_4 = MapObjects_0(i);
/* 224 */ if (globalIsNull_1) {
/* 225 */ mutableStateArray_0[0].setNullAt(0);
/* 226 */ } else {
/* 227 */ // Remember the current cursor so that we can calculate how many bytes are
/* 228 */ // written later.
/* 229 */ final int previousCursor_0 = mutableStateArray_0[0].cursor();
/* 230 */
/* 231 */ final ArrayData tmpInput_0 = value_4;
/* 232 */ if (tmpInput_0 instanceof UnsafeArrayData) {
/* 233 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0);
/* 234 */ } else {
/* 235 */ final int numElements_0 = tmpInput_0.numElements();
/* 236 */ mutableStateArray_1[0].initialize(numElements_0);
/* 237 */
/* 238 */ for (int index_2 = 0; index_2 < numElements_0; index_2++) {
/* 239 */ if (tmpInput_0.isNullAt(index_2)) {
/* 240 */ mutableStateArray_1[0].setNull8Bytes(index_2);
/* 241 */ } else {
/* 242 */
/* 243 */ final MapData tmpInput_1 = tmpInput_0.getMap(index_2);
/* 244 */ if (tmpInput_1 instanceof UnsafeMapData) {
/* 245 */ mutableStateArray_1[0].write(index_2, (UnsafeMapData) tmpInput_1);
/* 246 */ } else {
/* 247 */ // Remember the current cursor so that we can calculate how many bytes are
/* 248 */ // written later.
/* 249 */ final int previousCursor_1 = mutableStateArray_1[0].cursor();
/* 250 */
/* 251 */ // preserve 8 bytes to write the key array numBytes later.
/* 252 */ mutableStateArray_1[0].grow(8);
/* 253 */ mutableStateArray_1[0].increaseCursor(8);
/* 254 */
/* 255 */ // Remember the current cursor so that we can write numBytes of key array later.
/* 256 */ final int tmpCursor_0 = mutableStateArray_1[0].cursor();
/* 257 */
/* 258 */
/* 259 */ final ArrayData tmpInput_2 = tmpInput_1.keyArray();
/* 260 */ if (tmpInput_2 instanceof UnsafeArrayData) {
/* 261 */ mutableStateArray_1[0].write((UnsafeArrayData) tmpInput_2);
/* 262 */ } else {
/* 263 */ final int numElements_1 = tmpInput_2.numElements();
/* 264 */ mutableStateArray_1[1].initialize(numElements_1);
/* 265 */
/* 266 */ for (int index_3 = 0; index_3 < numElements_1; index_3++) {
/* 267 */ if (tmpInput_2.isNullAt(index_3)) {
/* 268 */ mutableStateArray_1[1].setNull4Bytes(index_3);
/* 269 */ } else {
/* 270 */ mutableStateArray_1[1].write(index_3, tmpInput_2.getInt(index_3));
/* 271 */ }
/* 272 */ }
/* 273 */ }
/* 274 */
/* 275 */
/* 276 */ // Write the numBytes of key array into the first 8 bytes.
/* 277 */ Platform.putLong(
/* 278 */ mutableStateArray_1[0].getBuffer(),
/* 279 */ tmpCursor_0 - 8,
/* 280 */ mutableStateArray_1[0].cursor() - tmpCursor_0);
/* 281 */
/* 282 */
/* 283 */ final ArrayData tmpInput_3 = tmpInput_1.valueArray();
/* 284 */ if (tmpInput_3 instanceof UnsafeArrayData) {
/* 285 */ mutableStateArray_1[0].write((UnsafeArrayData) tmpInput_3);
/* 286 */ } else {
/* 287 */ final int numElements_2 = tmpInput_3.numElements();
/* 288 */ mutableStateArray_1[2].initialize(numElements_2);
/* 289 */
/* 290 */ for (int index_4 = 0; index_4 < numElements_2; index_4++) {
/* 291 */ if (tmpInput_3.isNullAt(index_4)) {
/* 292 */ mutableStateArray_1[2].setNull4Bytes(index_4);
/* 293 */ } else {
/* 294 */ mutableStateArray_1[2].write(index_4, tmpInput_3.getInt(index_4));
/* 295 */ }
/* 296 */ }
/* 297 */ }
/* 298 */
/* 299 */ mutableStateArray_1[0].setOffsetAndSizeFromPreviousCursor(index_2, previousCursor_1);
/* 300 */ }
/* 301 */
/* 302 */ }
/* 303 */ }
/* 304 */ }
/* 305 */
/* 306 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0);
/* 307 */ }
/* 308 */
/* 309 */ }
/* 310 */
/* 311 */
/* 312 */ private MapData ExternalMapToCatalyst_0(InternalRow i) {
/* 313 */ MapData value_2 = null;
/* 314 */ if (!MapObjects_loopIsNull140) {
/* 315 */ final int length_0 = MapObjects_loopValue140.size();
/* 316 */ final Object[] convertedKeys_0 = new Object[length_0];
/* 317 */ final Object[] convertedValues_0 = new Object[length_0];
/* 318 */ int index_0 = 0;
/* 319 */ final scala.collection.Iterator entries_0 = MapObjects_loopValue140.iterator();
/* 320 */ while(entries_0.hasNext()) {
/* 321 */
/* 322 */ final scala.Tuple2 entry_0 = (scala.Tuple2) entries_0.next();
/* 323 */ ExternalMapToCatalyst_key8 = (Integer) entry_0._1();
/* 324 */ ExternalMapToCatalyst_value8 = (Integer) entry_0._2();
/* 325 */
/* 326 */
/* 327 */
/* 328 */
/* 329 */
/* 330 */ if (false) {
/* 331 */ throw new RuntimeException("Cannot use null as map key!");
/* 332 */ } else {
/* 333 */ convertedKeys_0[index_0] = (Integer) ExternalMapToCatalyst_key8;
/* 334 */ }
/* 335 */
/* 336 */
/* 337 */ if (false) {
/* 338 */ convertedValues_0[index_0] = null;
/* 339 */ } else {
/* 340 */ convertedValues_0[index_0] = (Integer) ExternalMapToCatalyst_value8;
/* 341 */ }
/* 342 */
/* 343 */ index_0++;
/* 344 */ }
/* 345 */
/* 346 */ value_2 = new org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys_0), new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues_0));
/* 347 */ }
/* 348 */ globalIsNull_0 = MapObjects_loopIsNull140;
/* 349 */ return value_2;
/* 350 */ }
/* 351 */
/* 352 */
/* 353 */ private ArrayData MapObjects_0(InternalRow i) {
/* 354 */ boolean isNull_1 = i.isNullAt(0);
/* 355 */ scala.collection.immutable.Map[] value_1 = isNull_1 ?
/* 356 */ null : ((scala.collection.immutable.Map[])i.get(0, null));
/* 357 */ ArrayData value_0 = null;
/* 358 */
/* 359 */ if (!isNull_1) {
/* 360 */
/* 361 */ int dataLength_0 = value_1.length;
/* 362 */
/* 363 */ MapData[] convertedArray_0 = null;
/* 364 */ convertedArray_0 = new MapData[dataLength_0];
/* 365 */
/* 366 */
/* 367 */ int loopIndex_0 = 0;
/* 368 */
/* 369 */ while (loopIndex_0 < dataLength_0) {
/* 370 */ MapObjects_loopValue140 = (scala.collection.immutable.Map) (value_1[loopIndex_0]);
/* 371 */ MapObjects_loopIsNull140 = MapObjects_loopValue140 == null;
/* 372 */
/* 373 */ MapData value_3 = ExternalMapToCatalyst_0(i);
/* 374 */ if (globalIsNull_0) {
/* 375 */ convertedArray_0[loopIndex_0] = null;
/* 376 */ } else {
/* 377 */ convertedArray_0[loopIndex_0] = value_3 instanceof UnsafeMapData? value_3.copy() : value_3;
/* 378 */ }
/* 379 */
/* 380 */ loopIndex_0 += 1;
/* 381 */ }
/* 382 */
/* 383 */ value_0 = new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedArray_0);
/* 384 */ }
/* 385 */ globalIsNull_1 = isNull_1;
/* 386 */ return value_0;
/* 387 */ }
/* 388 */
/* 389 */ }{code}


