Github user sameeragarwal commented on the pull request:

    https://github.com/apache/spark/pull/11359#issuecomment-188922487
  
    Generated code:
    
    ```java
    /* 001 */ public Object generate(Object[] references) {
    /* 002 */   return new GeneratedIterator(references);
    /* 003 */ }
    /* 004 */
    /* 005 */ /** Codegened pipeline for:
    /* 006 */ * Sort [id#0L ASC], true, 0
    /* 007 */ +- INPUT
    /* 008 */ */
    /* 009 */ class GeneratedIterator extends org.apache.spark.sql.execution.BufferedRowIterator {
    /* 010 */   private Object[] references;
    /* 011 */   private boolean sort_needToSort;
    /* 012 */   private org.apache.spark.sql.execution.Sort sort_plan;
    /* 013 */   private org.apache.spark.sql.execution.UnsafeExternalRowSorter sort_sorter;
    /* 014 */   private org.apache.spark.executor.TaskMetrics sort_metrics;
    /* 015 */   private scala.collection.Iterator<UnsafeRow> sort_sortedIter;
    /* 016 */   private scala.collection.Iterator inputadapter_input;
    /* 017 */   private UnsafeRow sort_result;
    /* 018 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder sort_holder;
    /* 019 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter sort_rowWriter;
    /* 020 */   private long sort_dataSize;
    /* 021 */   private long sort_spillSize;
    /* 022 */   private long sort_spillSizeBefore;
    /* 023 */
    /* 024 */   public GeneratedIterator(Object[] references) {
    /* 025 */     this.references = references;
    /* 026 */   }
    /* 027 */
    /* 028 */   public void init(scala.collection.Iterator inputs[]) {
    /* 029 */     sort_needToSort = true;
    /* 030 */     this.sort_plan = (org.apache.spark.sql.execution.Sort) references[0];
    /* 031 */     sort_sorter = sort_plan.createSorter();
    /* 032 */     sort_metrics = org.apache.spark.TaskContext.get().taskMetrics();
    /* 033 */
    /* 034 */     inputadapter_input = inputs[0];
    /* 035 */     sort_result = new UnsafeRow(1);
    /* 036 */     this.sort_holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(sort_result, 0);
    /* 037 */     this.sort_rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(sort_holder, 1);
    /* 038 */
    /* 039 */   }
    /* 040 */
    /* 041 */   private void sort_addToSorter() throws java.io.IOException {
    /* 042 */     while (inputadapter_input.hasNext()) {
    /* 043 */       InternalRow inputadapter_row = (InternalRow) inputadapter_input.next();
    /* 044 */       /* input[0, bigint] */
    /* 045 */       boolean inputadapter_isNull = inputadapter_row.isNullAt(0);
    /* 046 */       long inputadapter_value = inputadapter_isNull ? -1L : (inputadapter_row.getLong(0));
    /* 047 */       // Convert the input attributes to an UnsafeRow and add it to the sorter
    /* 048 */
    /* 049 */       sort_rowWriter.zeroOutNullBytes();
    /* 050 */
    /* 051 */       if (inputadapter_isNull) {
    /* 052 */         sort_rowWriter.setNullAt(0);
    /* 053 */       } else {
    /* 054 */         sort_rowWriter.write(0, inputadapter_value);
    /* 055 */       }
    /* 056 */
    /* 057 */       sort_sorter.insertRow(sort_result);
    /* 058 */       if (shouldStop()) {
    /* 059 */         return;
    /* 060 */       }
    /* 061 */     }
    /* 062 */
    /* 063 */   }
    /* 064 */
    /* 065 */   protected void processNext() throws java.io.IOException {
    /* 066 */     if (sort_needToSort) {
    /* 067 */       sort_addToSorter();
    /* 068 */       sort_spillSizeBefore = sort_metrics.memoryBytesSpilled();
    /* 069 */       sort_sortedIter = sort_sorter.sort();
    /* 070 */       sort_dataSize += sort_sorter.getPeakMemoryUsage();
    /* 071 */       sort_spillSize += sort_metrics.memoryBytesSpilled() - sort_spillSizeBefore;
    /* 072 */       sort_metrics.incPeakExecutionMemory(sort_sorter.getPeakMemoryUsage());
    /* 073 */       sort_needToSort = false;
    /* 074 */     }
    /* 075 */
    /* 076 */     while (sort_sortedIter.hasNext()) {
    /* 077 */       UnsafeRow sort_outputRow = (UnsafeRow)sort_sortedIter.next();
    /* 078 */       append(sort_outputRow.copy());
    /* 079 */     }
    /* 080 */   }
    /* 081 */ }
    ```
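
    For reference, here is a minimal sketch of how generated code like the dump above can be reproduced and inspected. It assumes the Spark 2.x `SparkSession` API and the `debugCodegen()` helper from `org.apache.spark.sql.execution.debug`; the object name, session setup, and exact query are illustrative, not taken from this PR.

    ```scala
    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.execution.debug._  // adds debugCodegen() to Datasets

    // Illustrative example only; not part of the PR under review.
    object SortCodegenDump {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("sort-codegen-dump")
          .getOrCreate()

        // A global sort over a single bigint column. The exchange feeding the
        // Sort operator appears as "INPUT" in the whole-stage codegen pipeline,
        // matching the "Sort [id#0L ASC], true, 0 +- INPUT" header above.
        val sorted = spark.range(1000).toDF("id").sort("id")

        // Prints the generated Java source for each WholeStageCodegen subtree,
        // similar to the dump shown in this comment.
        sorted.debugCodegen()

        spark.stop()
      }
    }
    ```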

