panbingkun commented on PR #49573:
URL: https://github.com/apache/spark/pull/49573#issuecomment-2609091759

   - A reproducing (bad) case:
   
https://github.com/apache/spark/blob/620f55262cab75485f4e1ee5d85dd24ab8a4c1aa/sql/core/src/test/scala/org/apache/spark/sql/DataFrameTimeWindowingSuite.scala#L165-L196
   
   - Generated (codegen) code:
   ```java
   /* 001 */ public Object generate(Object[] references) {
   /* 002 */   return new GeneratedIteratorForCodegenStage1(references);
   /* 003 */ }
   /* 004 */
   /* 005 */ // codegenStageId=1
   /* 006 */ final class GeneratedIteratorForCodegenStage1 extends 
org.apache.spark.sql.execution.BufferedRowIterator {
   /* 007 */   private Object[] references;
   /* 008 */   private scala.collection.Iterator[] inputs;
   /* 009 */   private boolean hashAgg_initAgg_0;
   /* 010 */   private org.apache.spark.unsafe.KVIterator hashAgg_mapIter_0;
   /* 011 */   private 
org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap hashAgg_hashMap_0;
   /* 012 */   private org.apache.spark.sql.execution.UnsafeKVExternalSorter 
hashAgg_sorter_0;
   /* 013 */   private scala.collection.Iterator localtablescan_input_0;
   /* 014 */   private boolean expand_resultIsNull_0;
   /* 015 */   private long filter_subExprValue_0;
   /* 016 */   private boolean filter_subExprIsNull_0;
   /* 017 */   private long filter_subExprValue_1;
   /* 018 */   private boolean filter_subExprIsNull_1;
   /* 019 */   private long filter_subExprValue_2;
   /* 020 */   private boolean filter_subExprIsNull_2;
   /* 021 */   private 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] 
filter_mutableStateArray_0 = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[15];
   /* 022 */   private InternalRow[] expand_mutableStateArray_0 = new 
InternalRow[1];
   /* 023 */
   /* 024 */   public GeneratedIteratorForCodegenStage1(Object[] references) {
   /* 025 */     this.references = references;
   /* 026 */   }
   /* 027 */
   /* 028 */   public void init(int index, scala.collection.Iterator[] inputs) {
   /* 029 */     partitionIndex = index;
   /* 030 */     this.inputs = inputs;
   /* 031 */     wholestagecodegen_init_0_0();
   /* 032 */     wholestagecodegen_init_0_1();
   /* 033 */
   /* 034 */   }
   /* 035 */
   /* 036 */   private void wholestagecodegen_init_0_1() {
   /* 037 */     filter_mutableStateArray_0[8] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(filter_mutableStateArray_0[7],
 2);
   /* 038 */     filter_mutableStateArray_0[9] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
   /* 039 */     filter_mutableStateArray_0[10] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(filter_mutableStateArray_0[9],
 2);
   /* 040 */     filter_mutableStateArray_0[11] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
   /* 041 */     filter_mutableStateArray_0[12] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(filter_mutableStateArray_0[11],
 2);
   /* 042 */     filter_mutableStateArray_0[13] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
   /* 043 */     filter_mutableStateArray_0[14] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(filter_mutableStateArray_0[13],
 2);
   /* 044 */
   /* 045 */   }
   /* 046 */
   /* 047 */   private void wholestagecodegen_init_0_0() {
   /* 048 */     localtablescan_input_0 = inputs[0];
   /* 049 */     filter_mutableStateArray_0[0] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(3, 64);
   /* 050 */     filter_mutableStateArray_0[1] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
   /* 051 */     filter_mutableStateArray_0[2] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
   /* 052 */     expand_resultIsNull_0 = true;
   /* 053 */     expand_mutableStateArray_0[0] = null;
   /* 054 */     filter_mutableStateArray_0[3] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 64);
   /* 055 */     filter_mutableStateArray_0[4] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(filter_mutableStateArray_0[3],
 2);
   /* 056 */     filter_mutableStateArray_0[5] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 64);
   /* 057 */     filter_mutableStateArray_0[6] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(filter_mutableStateArray_0[5],
 2);
   /* 058 */     filter_mutableStateArray_0[7] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32);
   /* 059 */
   /* 060 */   }
   /* 061 */
   /* 062 */   private void filter_subExpr_0(boolean expand_resultIsNull_0, 
org.apache.spark.sql.catalyst.InternalRow expand_mutableStateArray_0[0]) {
   /* 063 */     // 1...
   /* 064 */     boolean filter_isNull_11 = expand_resultIsNull_0;
   /* 065 */     long filter_value_12 = -1L;
   /* 066 */
   /* 067 */     if (!expand_resultIsNull_0) {
   /* 068 */       if (expand_mutableStateArray_0[0].isNullAt(0)) {
   /* 069 */         filter_isNull_11 = true;
   /* 070 */       } else {
   /* 071 */         filter_value_12 = expand_mutableStateArray_0[0].getLong(0);
   /* 072 */       }
   /* 073 */
   /* 074 */     }
   /* 075 */     // 2...
   /* 076 */     filter_subExprIsNull_0 = filter_isNull_11;
   /* 077 */     // 3...
   /* 078 */     filter_subExprValue_0 = filter_value_12;
   /* 079 */   }
   /* 080 */
   /* 081 */   protected void processNext() throws java.io.IOException {
   /* 082 */     if (!hashAgg_initAgg_0) {
   /* 083 */       hashAgg_initAgg_0 = true;
   
   ```


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to