Github user kiszk commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20850#discussion_r178463388
  
    --- Diff: 
sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/codegen/UnsafeRowWriter.java
 ---
    @@ -117,150 +138,81 @@ public long getFieldOffset(int ordinal) {
         return startingOffset + nullBitsSize + 8 * ordinal;
       }
     
    -  public void setOffsetAndSize(int ordinal, int size) {
    -    setOffsetAndSize(ordinal, holder.cursor, size);
    -  }
    -
    -  public void setOffsetAndSize(int ordinal, int currentCursor, int size) {
    -    final long relativeOffset = currentCursor - startingOffset;
    -    final long fieldOffset = getFieldOffset(ordinal);
    -    final long offsetAndSize = (relativeOffset << 32) | (long) size;
    -
    -    Platform.putLong(holder.buffer, fieldOffset, offsetAndSize);
    -  }
    -
       public void write(int ordinal, boolean value) {
         final long offset = getFieldOffset(ordinal);
    -    Platform.putLong(holder.buffer, offset, 0L);
    -    Platform.putBoolean(holder.buffer, offset, value);
    +    Platform.putLong(buffer(), offset, 0L);
    +    writeBoolean(offset, value);
       }
     
       public void write(int ordinal, byte value) {
         final long offset = getFieldOffset(ordinal);
    -    Platform.putLong(holder.buffer, offset, 0L);
    -    Platform.putByte(holder.buffer, offset, value);
    +    Platform.putLong(buffer(), offset, 0L);
    +    writeByte(offset, value);
       }
     
       public void write(int ordinal, short value) {
         final long offset = getFieldOffset(ordinal);
    -    Platform.putLong(holder.buffer, offset, 0L);
    -    Platform.putShort(holder.buffer, offset, value);
    +    Platform.putLong(buffer(), offset, 0L);
    +    writeShort(offset, value);
       }
     
       public void write(int ordinal, int value) {
         final long offset = getFieldOffset(ordinal);
    -    Platform.putLong(holder.buffer, offset, 0L);
    -    Platform.putInt(holder.buffer, offset, value);
    +    Platform.putLong(buffer(), offset, 0L);
    +    writeInt(offset, value);
       }
     
       public void write(int ordinal, long value) {
    -    Platform.putLong(holder.buffer, getFieldOffset(ordinal), value);
    +    writeLong(getFieldOffset(ordinal), value);
       }
     
       public void write(int ordinal, float value) {
    -    if (Float.isNaN(value)) {
    -      value = Float.NaN;
    -    }
         final long offset = getFieldOffset(ordinal);
    -    Platform.putLong(holder.buffer, offset, 0L);
    -    Platform.putFloat(holder.buffer, offset, value);
    +    Platform.putLong(buffer(), offset, 0L);
    +    writeFloat(offset, value);
       }
     
       public void write(int ordinal, double value) {
    -    if (Double.isNaN(value)) {
    -      value = Double.NaN;
    -    }
    -    Platform.putDouble(holder.buffer, getFieldOffset(ordinal), value);
    +    writeDouble(getFieldOffset(ordinal), value);
       }
     
       public void write(int ordinal, Decimal input, int precision, int scale) {
         if (precision <= Decimal.MAX_LONG_DIGITS()) {
           // make sure Decimal object has the same scale as DecimalType
           if (input.changePrecision(precision, scale)) {
    --- End diff --
    
    Good point. I will add `input != null`.
    I am also curious about the differences between these two methods.



---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to