yaooqinn commented on issue #26371: [SPARK-27976][SQL] Add built-in Array Functions: array_append
URL: https://github.com/apache/spark/pull/26371#issuecomment-549264011

```java
--- !query 24
-explain codegen select array_append(a, e) from VALUES (array(1,2), 3), (array(3, 4), null), (null, 3), (null, null) tbl(a, e)
--- !query 24 schema
-struct<plan:string>
--- !query 24 output
-Found 1 WholeStageCodegen subtrees.
-== Subtree 1 / 1 (maxMethodCodeSize:453; maxConstantPoolSize:151(0.23% used); numInnerClasses:0) ==
-*Project [array_append(a#x, e#x) AS array_append(a, e)#x]
-+- *LocalTableScan [a#x, e#x]
-
-Generated code:
-/* 001 */ public Object generate(Object[] references) {
-/* 002 */   return new GeneratedIteratorForCodegenStage1(references);
-/* 003 */ }
-/* 004 */
-/* 005 */ // codegenStageId=1
-/* 006 */ final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator {
-/* 007 */   private Object[] references;
-/* 008 */   private scala.collection.Iterator[] inputs;
-/* 009 */   private scala.collection.Iterator localtablescan_input_0;
-/* 010 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] project_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];
-/* 011 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] project_mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1];
-/* 012 */
-/* 013 */   public GeneratedIteratorForCodegenStage1(Object[] references) {
-/* 014 */     this.references = references;
-/* 015 */   }
-/* 016 */
-/* 017 */   public void init(int index, scala.collection.Iterator[] inputs) {
-/* 018 */     partitionIndex = index;
-/* 019 */     this.inputs = inputs;
-/* 020 */     localtablescan_input_0 = inputs[0];
-/* 021 */     project_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
-/* 022 */     project_mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(project_mutableStateArray_0[0], 4);
-/* 023 */
-/* 024 */   }
-/* 025 */
-/* 026 */   protected void processNext() throws java.io.IOException {
-/* 027 */     while ( localtablescan_input_0.hasNext()) {
-/* 028 */       InternalRow localtablescan_row_0 = (InternalRow) localtablescan_input_0.next();
-/* 029 */       ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1);
-/* 030 */       boolean project_isNull_0 = true;
-/* 031 */       ArrayData project_value_0 = null;
-/* 032 */       boolean localtablescan_isNull_0 = localtablescan_row_0.isNullAt(0);
-/* 033 */       ArrayData localtablescan_value_0 = localtablescan_isNull_0 ?
-/* 034 */       null : (localtablescan_row_0.getArray(0));
-/* 035 */       boolean localtablescan_isNull_1 = localtablescan_row_0.isNullAt(1);
-/* 036 */       int localtablescan_value_1 = localtablescan_isNull_1 ?
-/* 037 */       -1 : (localtablescan_row_0.getInt(1));
-/* 038 */       if (localtablescan_isNull_0) {
-/* 039 */         if (!localtablescan_isNull_1) {
-/* 040 */           project_isNull_0 = false; // resultCode could change nullability.
-/* 041 */           int project_oldArraySize_0 = 0;
-/* 042 */
-/* 043 */           ArrayData project_newArray_0 = ArrayData.allocateArrayData(
-/* 044 */             4, 1, "array_append failed");
-/* 045 */
-/* 046 */           if (localtablescan_isNull_1) {
-/* 047 */             project_newArray_0.setNullAt(project_oldArraySize_0);
-/* 048 */           } else {
-/* 049 */             project_newArray_0.setInt(project_oldArraySize_0, localtablescan_value_1);
-/* 050 */           }
-/* 051 */
-/* 052 */           project_value_0 = project_newArray_0;
-/* 053 */         }
-/* 054 */       } else {
-/* 055 */         project_isNull_0 = false; // resultCode could change nullability.
-/* 056 */
-/* 057 */         int project_oldArraySize_0 = localtablescan_value_0.numElements();
-/* 058 */         int project_newArraySize_0 = project_oldArraySize_0 + 1;
-/* 059 */
-/* 060 */         ArrayData project_newArray_0 = ArrayData.allocateArrayData(
-/* 061 */           4, project_newArraySize_0, "array_append failed");
-/* 062 */
-/* 063 */         for (int project_i_0 = 0; project_i_0 < localtablescan_value_0.numElements(); project_i_0 ++) {
-/* 064 */           if (localtablescan_value_0.isNullAt(project_i_0)) {
-/* 065 */             project_newArray_0.setNullAt(project_i_0);
-/* 066 */           } else {
-/* 067 */             project_newArray_0.setInt(project_i_0, localtablescan_value_0.getInt(project_i_0));
-/* 068 */           }
-/* 069 */
-/* 070 */         }
-/* 071 */
-/* 072 */         if (localtablescan_isNull_1) {
-/* 073 */           project_newArray_0.setNullAt(project_oldArraySize_0);
-/* 074 */         } else {
-/* 075 */           project_newArray_0.setInt(project_oldArraySize_0, localtablescan_value_1);
-/* 076 */         }
-/* 077 */
-/* 078 */         project_value_0 = project_newArray_0;
-/* 079 */
-/* 080 */       }
-/* 081 */       project_mutableStateArray_0[0].reset();
-/* 082 */
-/* 083 */       project_mutableStateArray_0[0].zeroOutNullBytes();
-/* 084 */
-/* 085 */       if (project_isNull_0) {
-/* 086 */         project_mutableStateArray_0[0].setNullAt(0);
-/* 087 */       } else {
-/* 088 */         // Remember the current cursor so that we can calculate how many bytes are
-/* 089 */         // written later.
-/* 090 */         final int project_previousCursor_0 = project_mutableStateArray_0[0].cursor();
-/* 091 */
-/* 092 */         final ArrayData project_tmpInput_0 = project_value_0;
-/* 093 */         if (project_tmpInput_0 instanceof UnsafeArrayData) {
-/* 094 */           project_mutableStateArray_0[0].write((UnsafeArrayData) project_tmpInput_0);
-/* 095 */         } else {
-/* 096 */           final int project_numElements_0 = project_tmpInput_0.numElements();
-/* 097 */           project_mutableStateArray_1[0].initialize(project_numElements_0);
-/* 098 */
-/* 099 */           for (int project_index_0 = 0; project_index_0 < project_numElements_0; project_index_0++) {
-/* 100 */             if (project_tmpInput_0.isNullAt(project_index_0)) {
-/* 101 */               project_mutableStateArray_1[0].setNull4Bytes(project_index_0);
-/* 102 */             } else {
-/* 103 */               project_mutableStateArray_1[0].write(project_index_0, project_tmpInput_0.getInt(project_index_0));
-/* 104 */             }
-/* 105 */
-/* 106 */           }
-/* 107 */         }
-/* 108 */
-/* 109 */         project_mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, project_previousCursor_0);
-/* 110 */       }
-/* 111 */       append((project_mutableStateArray_0[0].getRow()));
-/* 112 */       if (shouldStop()) return;
-/* 113 */     }
-/* 114 */   }
-/* 115 */
-/* 116 */ }
```
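
For anyone skimming the generated code, the two branches above encode the following null semantics for `array_append(a, e)` on an `array<int>` column: a NULL array with a NULL element produces NULL, a NULL array with a non-NULL element produces a one-element array, and a non-NULL array is copied with the element appended (a NULL element is appended as a trailing NULL). Below is a minimal Scala sketch of that behaviour using plain collections instead of Catalyst's `ArrayData`/`UnsafeArrayWriter` machinery; the `arrayAppendSketch` name and the `Option` encoding of SQL NULL are assumptions made for illustration, not the PR's actual expression code.

```scala
// Illustrative sketch only: mirrors the null handling visible in the generated
// code above, not the Catalyst expression added by this PR.
def arrayAppendSketch(a: Option[Seq[Option[Int]]],
                      e: Option[Int]): Option[Seq[Option[Int]]] =
  (a, e) match {
    case (None, None)    => None           // NULL array, NULL element -> NULL
    case (None, Some(_)) => Some(Seq(e))   // NULL array, non-NULL element -> one-element array
    case (Some(arr), _)  => Some(arr :+ e) // copy and append; a NULL element stays NULL
  }
```

Applied to the four VALUES rows in the query, this yields [1,2,3], [3,4,NULL], [3] and NULL respectively, matching what the null-array and non-null-array branches of the codegen produce.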
