Github user maropu commented on the issue:

    https://github.com/apache/spark/pull/15928
  
    codegen for `UDF`:
    ```
    == Subtree 1 / 1 ==
    *Project [HiveSimpleUDF#org.apache.hadoop.hive.ql.udf.UDFToDouble(id#1012L) AS f(id)#1017]
    +- *Range (0, 3, step=1, splits=Some(1))
    
    Generated code:
    /* 001 */ public Object generate(Object[] references) {
    /* 002 */   return new GeneratedIterator(references);
    /* 003 */ }
    /* 004 */
    /* 005 */ final class GeneratedIterator extends org.apache.spark.sql.execution.BufferedRowIterator {
    /* 006 */   private Object[] references;
    /* 007 */   private scala.collection.Iterator[] inputs;
    /* 008 */   private org.apache.spark.sql.execution.metric.SQLMetric range_numOutputRows;
    /* 009 */   private boolean range_initRange;
    /* 010 */   private long range_partitionEnd;
    /* 011 */   private long range_number;
    /* 012 */   private boolean range_overflow;
    /* 013 */   private scala.collection.Iterator range_input;
    /* 014 */   private UnsafeRow range_result;
    /* 015 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder range_holder;
    /* 016 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter range_rowWriter;
    /* 017 */   private org.apache.spark.sql.hive.HiveSimpleUDF project_hiveUDF;
    /* 018 */   private scala.Function1 project_converter;
    /* 019 */   private scala.Function1 project_catalystConverter;
    /* 020 */   private UnsafeRow project_result1;
    /* 021 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder project_holder;
    /* 022 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter project_rowWriter;
    /* 023 */
    /* 024 */   public GeneratedIterator(Object[] references) {
    /* 025 */     this.references = references;
    /* 026 */   }
    /* 027 */
    /* 028 */   public void init(int index, scala.collection.Iterator[] inputs) {
    /* 029 */     partitionIndex = index;
    /* 030 */     this.inputs = inputs;
    /* 031 */     this.range_numOutputRows = (org.apache.spark.sql.execution.metric.SQLMetric) references[0];
    /* 032 */     range_initRange = false;
    /* 033 */     range_partitionEnd = 0L;
    /* 034 */     range_number = 0L;
    /* 035 */     range_overflow = false;
    /* 036 */     range_input = inputs[0];
    /* 037 */     range_result = new UnsafeRow(1);
    /* 038 */     this.range_holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(range_result, 0);
    /* 039 */     this.range_rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(range_holder, 1);
    /* 040 */     this.project_hiveUDF = (org.apache.spark.sql.hive.HiveSimpleUDF) references[1];
    /* 041 */     this.project_converter = (scala.Function1) project_hiveUDF.getWrapper(0);
    /* 042 */     this.project_catalystConverter = (scala.Function1)project_hiveUDF.getUnwrapper();
    /* 043 */     project_result1 = new UnsafeRow(1);
    /* 044 */     this.project_holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(project_result1, 0);
    /* 045 */     this.project_rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(project_holder, 1);
    /* 046 */
    /* 047 */   }
    /* 048 */
    /* 049 */   private void initRange(int idx) {
    /* 050 */     java.math.BigInteger index = java.math.BigInteger.valueOf(idx);
    /* 051 */     java.math.BigInteger numSlice = java.math.BigInteger.valueOf(1L);
    /* 052 */     java.math.BigInteger numElement = java.math.BigInteger.valueOf(3L);
    /* 053 */     java.math.BigInteger step = java.math.BigInteger.valueOf(1L);
    /* 054 */     java.math.BigInteger start = java.math.BigInteger.valueOf(0L);
    /* 055 */
    /* 056 */     java.math.BigInteger st = index.multiply(numElement).divide(numSlice).multiply(step).add(start);
    /* 057 */     if (st.compareTo(java.math.BigInteger.valueOf(Long.MAX_VALUE)) > 0) {
    /* 058 */       range_number = Long.MAX_VALUE;
    /* 059 */     } else if (st.compareTo(java.math.BigInteger.valueOf(Long.MIN_VALUE)) < 0) {
    /* 060 */       range_number = Long.MIN_VALUE;
    /* 061 */     } else {
    /* 062 */       range_number = st.longValue();
    /* 063 */     }
    /* 064 */
    /* 065 */     java.math.BigInteger end = index.add(java.math.BigInteger.ONE).multiply(numElement).divide(numSlice)
    /* 066 */     .multiply(step).add(start);
    /* 067 */     if (end.compareTo(java.math.BigInteger.valueOf(Long.MAX_VALUE)) > 0) {
    /* 068 */       range_partitionEnd = Long.MAX_VALUE;
    /* 069 */     } else if (end.compareTo(java.math.BigInteger.valueOf(Long.MIN_VALUE)) < 0) {
    /* 070 */       range_partitionEnd = Long.MIN_VALUE;
    /* 071 */     } else {
    /* 072 */       range_partitionEnd = end.longValue();
    /* 073 */     }
    /* 074 */
    /* 075 */     range_numOutputRows.add((range_partitionEnd - range_number) / 1L);
    /* 076 */   }
    /* 077 */
    /* 078 */   protected void processNext() throws java.io.IOException {
    /* 079 */     // initialize Range
    /* 080 */     if (!range_initRange) {
    /* 081 */       range_initRange = true;
    /* 082 */       initRange(partitionIndex);
    /* 083 */     }
    /* 084 */
    /* 085 */     while (!range_overflow && range_number < range_partitionEnd) {
    /* 086 */       long range_value = range_number;
    /* 087 */       range_number += 1L;
    /* 088 */       if (range_number < range_value ^ 1L < 0) {
    /* 089 */         range_overflow = true;
    /* 090 */       }
    /* 091 */
    /* 092 */       final Object project_arg = false ? null : project_converter.apply(range_value);
    /* 093 */
    /* 094 */       Double project_result = null;
    /* 095 */       try {
    /* 096 */         project_result = (Double)project_catalystConverter.apply(project_hiveUDF.callUdf(project_arg));
    /* 097 */       } catch (Exception e) {
    /* 098 */         throw new org.apache.spark.SparkException(project_hiveUDF.udfErrorMessage(), e);
    /* 099 */       }
    /* 100 */       boolean project_isNull = project_result == null;
    /* 101 */       Double project_value = -1.0;
    /* 102 */       if (!project_isNull) {
    /* 103 */         project_value = project_result;
    /* 104 */       }
    /* 105 */       project_rowWriter.zeroOutNullBytes();
    /* 106 */
    /* 107 */       if (project_isNull) {
    /* 108 */         project_rowWriter.setNullAt(0);
    /* 109 */       } else {
    /* 110 */         project_rowWriter.write(0, project_value);
    /* 111 */       }
    /* 112 */       append(project_result1);
    /* 113 */
    /* 114 */       if (shouldStop()) return;
    /* 115 */     }
    /* 116 */   }
    /* 117 */ }
    ```
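    
    For reference, a dump like the one above can presumably be reproduced along these lines (a hedged sketch only: it assumes a Hive-enabled session running this PR's branch, and the temporary function name `f` plus the `range(0, 3)` input simply mirror what the plan shows):
    
    ```
    // Sketch: reproduce the plan/codegen dump under the stated assumptions.
    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.execution.debug._  // provides debugCodegen()
    
    val spark = SparkSession.builder()
      .master("local[1]")
      .enableHiveSupport()  // HiveSimpleUDF lives in the Hive support module
      .getOrCreate()
    
    // Register the Hive simple UDF seen in the plan under the name `f`.
    spark.sql("CREATE TEMPORARY FUNCTION f AS 'org.apache.hadoop.hive.ql.udf.UDFToDouble'")
    
    // Range (0, 3, step=1, splits=Some(1)) with the UDF projected on top.
    val df = spark.range(0, 3, 1, 1).selectExpr("f(id)")
    
    // Prints the "== Subtree n / m ==" headers followed by the generated Java source.
    df.debugCodegen()
    ```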
    
    codegen for `GenericUDF`:
    ```
    == Subtree 1 / 1 ==
    *Project [HiveGenericUDF#org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs(id#1012L) AS f(id)#1028L]
    +- *Range (0, 3, step=1, splits=Some(1))
    
    Generated code:
    /* 001 */ public Object generate(Object[] references) {
    /* 002 */   return new GeneratedIterator(references);
    /* 003 */ }
    /* 004 */
    /* 005 */ final class GeneratedIterator extends org.apache.spark.sql.execution.BufferedRowIterator {
    /* 006 */   private Object[] references;
    /* 007 */   private scala.collection.Iterator[] inputs;
    /* 008 */   private org.apache.spark.sql.execution.metric.SQLMetric range_numOutputRows;
    /* 009 */   private boolean range_initRange;
    /* 010 */   private long range_partitionEnd;
    /* 011 */   private long range_number;
    /* 012 */   private boolean range_overflow;
    /* 013 */   private scala.collection.Iterator range_input;
    /* 014 */   private UnsafeRow range_result;
    /* 015 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder range_holder;
    /* 016 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter range_rowWriter;
    /* 017 */   private org.apache.spark.sql.hive.HiveGenericUDF project_hiveUDF;
    /* 018 */   private org.apache.hadoop.hive.ql.udf.generic.GenericUDF$DeferredObject[] project_deferredObjects;
    /* 019 */   private org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector project_objectInspector;
    /* 020 */   private scala.Function1 project_converter;
    /* 021 */   private scala.Function1 project_catalystConverter;
    /* 022 */   private UnsafeRow project_result1;
    /* 023 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder project_holder;
    /* 024 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter project_rowWriter;
    /* 025 */
    /* 026 */   public GeneratedIterator(Object[] references) {
    /* 027 */     this.references = references;
    /* 028 */   }
    /* 029 */
    /* 030 */   public void init(int index, scala.collection.Iterator[] inputs) {
    /* 031 */     partitionIndex = index;
    /* 032 */     this.inputs = inputs;
    /* 033 */     wholestagecodegen_init_0();
    /* 034 */     wholestagecodegen_init_1();
    /* 035 */
    /* 036 */   }
    /* 037 */
    /* 038 */   private void wholestagecodegen_init_0() {
    /* 039 */     this.range_numOutputRows = (org.apache.spark.sql.execution.metric.SQLMetric) references[0];
    /* 040 */     range_initRange = false;
    /* 041 */     range_partitionEnd = 0L;
    /* 042 */     range_number = 0L;
    /* 043 */     range_overflow = false;
    /* 044 */     range_input = inputs[0];
    /* 045 */     range_result = new UnsafeRow(1);
    /* 046 */     this.range_holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(range_result, 0);
    /* 047 */     this.range_rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(range_holder, 1);
    /* 048 */     this.project_hiveUDF = (org.apache.spark.sql.hive.HiveGenericUDF) references[1];
    /* 049 */     this.project_deferredObjects = (org.apache.hadoop.hive.ql.udf.generic.GenericUDF$DeferredObject[])project_hiveUDF.getDeferredObjects();
    /* 050 */     this.project_objectInspector = (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)project_hiveUDF.getReturnInspector();
    /* 051 */     this.project_converter = (scala.Function1) project_hiveUDF.getWrapper(0);
    /* 052 */     this.project_catalystConverter = (scala.Function1)project_hiveUDF.getUnwrapper();
    /* 053 */     project_result1 = new UnsafeRow(1);
    /* 054 */     this.project_holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(project_result1, 0);
    /* 055 */
    /* 056 */   }
    /* 057 */
    /* 058 */   private void wholestagecodegen_init_1() {
    /* 059 */     this.project_rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(project_holder, 1);
    /* 060 */
    /* 061 */   }
    /* 062 */
    /* 063 */   private void initRange(int idx) {
    /* 064 */     java.math.BigInteger index = java.math.BigInteger.valueOf(idx);
    /* 065 */     java.math.BigInteger numSlice = java.math.BigInteger.valueOf(1L);
    /* 066 */     java.math.BigInteger numElement = java.math.BigInteger.valueOf(3L);
    /* 067 */     java.math.BigInteger step = java.math.BigInteger.valueOf(1L);
    /* 068 */     java.math.BigInteger start = java.math.BigInteger.valueOf(0L);
    /* 069 */
    /* 070 */     java.math.BigInteger st = index.multiply(numElement).divide(numSlice).multiply(step).add(start);
    /* 071 */     if (st.compareTo(java.math.BigInteger.valueOf(Long.MAX_VALUE)) > 0) {
    /* 072 */       range_number = Long.MAX_VALUE;
    /* 073 */     } else if (st.compareTo(java.math.BigInteger.valueOf(Long.MIN_VALUE)) < 0) {
    /* 074 */       range_number = Long.MIN_VALUE;
    /* 075 */     } else {
    /* 076 */       range_number = st.longValue();
    /* 077 */     }
    /* 078 */
    /* 079 */     java.math.BigInteger end = index.add(java.math.BigInteger.ONE).multiply(numElement).divide(numSlice)
    /* 080 */     .multiply(step).add(start);
    /* 081 */     if (end.compareTo(java.math.BigInteger.valueOf(Long.MAX_VALUE)) > 0) {
    /* 082 */       range_partitionEnd = Long.MAX_VALUE;
    /* 083 */     } else if (end.compareTo(java.math.BigInteger.valueOf(Long.MIN_VALUE)) < 0) {
    /* 084 */       range_partitionEnd = Long.MIN_VALUE;
    /* 085 */     } else {
    /* 086 */       range_partitionEnd = end.longValue();
    /* 087 */     }
    /* 088 */
    /* 089 */     range_numOutputRows.add((range_partitionEnd - range_number) / 1L);
    /* 090 */   }
    /* 091 */
    /* 092 */   protected void processNext() throws java.io.IOException {
    /* 093 */     // initialize Range
    /* 094 */     if (!range_initRange) {
    /* 095 */       range_initRange = true;
    /* 096 */       initRange(partitionIndex);
    /* 097 */     }
    /* 098 */
    /* 099 */     while (!range_overflow && range_number < range_partitionEnd) {
    /* 100 */       long range_value = range_number;
    /* 101 */       range_number += 1L;
    /* 102 */       if (range_number < range_value ^ 1L < 0) {
    /* 103 */         range_overflow = true;
    /* 104 */       }
    /* 105 */
    /* 106 */       final Object project_arg = false ? null : project_converter.apply(range_value);
    /* 107 */
    /* 108 */       ((org.apache.spark.sql.hive.DeferredObjectAdapter) this.project_deferredObjects[0])
    /* 109 */       .set(new org.apache.spark.api.java.function.Function0<Object>() {
    /* 110 */           @Override
    /* 111 */           public Object call() {
    /* 112 */             return project_arg;
    /* 113 */           }
    /* 114 */         });
    /* 115 */
    /* 116 */       Long project_result = null;
    /* 117 */       try {
    /* 118 */         project_result = (Long)project_catalystConverter.apply(project_hiveUDF.callUdf(project_deferredObjects));
    /* 119 */       } catch (Exception e) {
    /* 120 */         throw new org.apache.spark.SparkException(project_hiveUDF.udfErrorMessage(), e);
    /* 121 */       }
    /* 122 */       boolean project_isNull = project_result == null;
    /* 123 */       Long project_value = -1L;
    /* 124 */       if (!project_isNull) {
    /* 125 */         project_value = project_result;
    /* 126 */       }
    /* 127 */       project_rowWriter.zeroOutNullBytes();
    /* 128 */
    /* 129 */       if (project_isNull) {
    /* 130 */         project_rowWriter.setNullAt(0);
    /* 131 */       } else {
    /* 132 */         project_rowWriter.write(0, project_value);
    /* 133 */       }
    /* 134 */       append(project_result1);
    /* 135 */
    /* 136 */       if (shouldStop()) return;
    /* 137 */     }
    /* 138 */   }
    /* 139 */ }
    ```
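    
    The `DeferredObjectAdapter`/`Function0` wrapping in the `GenericUDF` variant reflects Hive's calling convention: `GenericUDF.evaluate` receives `DeferredObject` arguments that are only materialized when the UDF calls `get()`. A minimal standalone sketch of that contract using Hive's own classes (no Spark involved; the input value is arbitrary):
    
    ```
    // Sketch: the Hive GenericUDF contract that the generated code above drives.
    import org.apache.hadoop.hive.ql.udf.generic.{GenericUDF, GenericUDFAbs}
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
    
    val udf = new GenericUDFAbs()
    
    // initialize() takes one ObjectInspector per argument and returns the
    // inspector that describes the result representation.
    val returnInspector = udf.initialize(
      Array[ObjectInspector](PrimitiveObjectInspectorFactory.javaLongObjectInspector))
    
    // Arguments are handed over as DeferredObjects and only materialized when
    // the UDF calls get(); DeferredObjectAdapter in the generated code plays this role.
    val args = Array[GenericUDF.DeferredObject](
      new GenericUDF.DeferredJavaObject(java.lang.Long.valueOf(-3L)))
    
    // The result comes back in the Hive representation described by returnInspector;
    // the generated code converts it to a Catalyst value via project_catalystConverter.
    val result = udf.evaluate(args)
    println(result)  // abs(-3) => 3, in Hive's wrapped form
    ```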

