asheeshgarg commented on issue #621: Broadcast Join Failure
URL: 
https://github.com/apache/incubator-iceberg/issues/621#issuecomment-554488385
 
 
   The following is the code generated by Spark:
   DEBUG CodeGenerator: 
   /* 001 */ public Object generate(Object[] references) {
   /* 002 */   return new GeneratedIteratorForCodegenStage1(references);
   /* 003 */ }
   /* 004 */
   /* 005 */ // codegenStageId=1
   /* 006 */ final class GeneratedIteratorForCodegenStage1 extends 
org.apache.spark.sql.execution.BufferedRowIterator {
   /* 007 */   private Object[] references;
   /* 008 */   private scala.collection.Iterator[] inputs;
   /* 009 */   private 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] 
filter_mutableStateArray_0 = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[2];
   /* 010 */   private scala.collection.Iterator[] 
datasourcev2scan_mutableStateArray_0 = new scala.collection.Iterator[1];
   /* 011 */
   /* 012 */   public GeneratedIteratorForCodegenStage1(Object[] references) {
   /* 013 */     this.references = references;
   /* 014 */   }
   /* 015 */
   /* 016 */   public void init(int index, scala.collection.Iterator[] inputs) {
   /* 017 */     partitionIndex = index;
   /* 018 */     this.inputs = inputs;
   /* 019 */     datasourcev2scan_mutableStateArray_0[0] = inputs[0];
   /* 020 */     filter_mutableStateArray_0[0] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
   /* 021 */     filter_mutableStateArray_0[1] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
   /* 022 */
   /* 023 */   }
   /* 024 */
   /* 025 */   protected void processNext() throws java.io.IOException {
   /* 026 */     while (datasourcev2scan_mutableStateArray_0[0].hasNext()) {
   /* 027 */       InternalRow datasourcev2scan_row_0 = (InternalRow) 
datasourcev2scan_mutableStateArray_0[0].next();
   /* 028 */       ((org.apache.spark.sql.execution.metric.SQLMetric) 
references[0] /* numOutputRows */).add(1);
   /* 029 */       do {
   /* 030 */         boolean datasourcev2scan_isNull_1 = 
datasourcev2scan_row_0.isNullAt(1);
   /* 031 */         UTF8String datasourcev2scan_value_1 = 
datasourcev2scan_isNull_1 ?
   /* 032 */         null : (datasourcev2scan_row_0.getUTF8String(1));
   /* 033 */
   /* 034 */         if (!(!datasourcev2scan_isNull_1)) continue;
   /* 035 */
   /* 036 */         ((org.apache.spark.sql.execution.metric.SQLMetric) 
references[1] /* numOutputRows */).add(1);
   /* 037 */
   /* 038 */         int datasourcev2scan_value_0 = 
datasourcev2scan_row_0.getInt(0);
   /* 039 */         filter_mutableStateArray_0[1].reset();
   /* 040 */
   /* 041 */         filter_mutableStateArray_0[1].write(0, 
datasourcev2scan_value_0);
   /* 042 */
   /* 043 */         if (false) {
   /* 044 */           filter_mutableStateArray_0[1].setNullAt(1);
   /* 045 */         } else {
   /* 046 */           filter_mutableStateArray_0[1].write(1, 
datasourcev2scan_value_1);
   /* 047 */         }
   /* 048 */         append((filter_mutableStateArray_0[1].getRow()));
   /* 049 */
   /* 050 */       } while(false);
   /* 051 */       if (shouldStop()) return;
   /* 052 */     }
   /* 053 */   }
   /* 054 */
   /* 055 */ }
   
   

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to