abhishekagarwal87 commented on code in PR #12753:
URL: https://github.com/apache/druid/pull/12753#discussion_r945523605


##########
processing/src/main/java/org/apache/druid/segment/data/CompressedBlockSerializer.java:
##########
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.segment.data;
+
+import org.apache.druid.io.Channels;
+import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
+import org.apache.druid.segment.CompressedPools;
+import org.apache.druid.segment.serde.MetaSerdeHelper;
+import org.apache.druid.segment.serde.Serializer;
+import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
+import org.apache.druid.segment.writeout.WriteOutBytes;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.WritableByteChannel;
+
+public class CompressedBlockSerializer implements Serializer
+{
+  private static final MetaSerdeHelper<CompressedBlockSerializer> 
META_SERDE_HELPER = MetaSerdeHelper
+      .firstWriteByte((CompressedBlockSerializer x) -> (byte) 0x01)
+      .writeByte(x -> x.compression.getId())
+      .writeInt(x -> CompressedPools.BUFFER_SIZE)
+      .writeInt(x -> x.numBlocks);
+
+  private final SegmentWriteOutMedium segmentWriteOutMedium;
+  private final CompressionStrategy compression;
+  private final CompressionStrategy.Compressor compressor;
+
+  private final ByteBuffer offsetValueConverter = 
ByteBuffer.allocate(Integer.BYTES).order(ByteOrder.nativeOrder());
+
+  @Nullable
+  private ByteBuffer uncompressedDataBuffer;
+  private ByteBuffer compressedDataBuffer;
+  private int numBlocks;
+  private int currentOffset;
+
+  @Nullable
+  private WriteOutBytes headerOut = null;
+  @Nullable
+  private WriteOutBytes valuesOut = null;
+
+  public CompressedBlockSerializer(
+      SegmentWriteOutMedium segmentWriteOutMedium,
+
+      CompressionStrategy compression,
+      int blockSize
+  )
+  {
+    this.segmentWriteOutMedium = segmentWriteOutMedium;
+    this.compression = compression;
+    this.compressor = compression.getCompressor();
+    this.uncompressedDataBuffer = compressor.allocateInBuffer(blockSize, 
segmentWriteOutMedium.getCloser())
+                                            .order(ByteOrder.nativeOrder());
+    this.compressedDataBuffer = compressor.allocateOutBuffer(blockSize, 
segmentWriteOutMedium.getCloser())
+                                          .order(ByteOrder.nativeOrder());
+  }
+
+  public void open() throws IOException
+  {
+    headerOut = segmentWriteOutMedium.makeWriteOutBytes();
+    valuesOut = segmentWriteOutMedium.makeWriteOutBytes();
+  }
+
+  public void addValue(byte[] bytes) throws IOException
+  {
+    if (uncompressedDataBuffer == null) {
+      throw new IllegalStateException("written out already");
+    }
+    flushIfNeeded();
+
+    if (bytes.length <= uncompressedDataBuffer.remaining()) {
+      uncompressedDataBuffer.put(bytes);
+    } else {
+      int written = 0;
+      // write until we have had our fill, flushing buffers as needed
+      while (written < bytes.length) {
+        int next = Math.min(uncompressedDataBuffer.remaining(), bytes.length - 
written);
+        uncompressedDataBuffer.put(bytes, written, next);
+        written += next;
+        flushIfNeeded();
+      }
+    }
+  }
+
+  public void addValue(ByteBuffer bytes) throws IOException
+  {
+    if (uncompressedDataBuffer == null) {
+      throw new IllegalStateException("written out already");
+    }
+    flushIfNeeded();
+    int size = bytes.remaining();
+    if (size <= uncompressedDataBuffer.remaining()) {
+      uncompressedDataBuffer.put(bytes);
+    } else {
+      ByteBuffer transferBuffer = 
bytes.asReadOnlyBuffer().order(bytes.order());
+      while (transferBuffer.hasRemaining()) {
+        int writeSize = Math.min(transferBuffer.remaining(), 
uncompressedDataBuffer.remaining());
+        transferBuffer.limit(transferBuffer.position() + writeSize);
+        uncompressedDataBuffer.put(transferBuffer);
+        transferBuffer.limit(bytes.limit());
+        flushIfNeeded();
+      }
+    }
+    bytes.rewind();
+  }
+
+  @Override
+  public long getSerializedSize() throws IOException
+  {
+    writeEndBuffer();
+    return META_SERDE_HELPER.size(this) + headerOut.size() + valuesOut.size();
+  }
+
+  @Override
+  public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) 
throws IOException
+  {
+    writeEndBuffer();
+    META_SERDE_HELPER.writeTo(channel, this);
+    headerOut.writeTo(channel);
+    valuesOut.writeTo(channel);
+  }
+
+  private void flushIfNeeded() throws IOException
+  {
+    if (!uncompressedDataBuffer.hasRemaining()) {
+      flushBuffer();
+    }
+  }
+
+  private void flushBuffer() throws IOException
+  {
+    uncompressedDataBuffer.rewind();
+    compressedDataBuffer.clear();
+
+    final ByteBuffer compressed = compressor.compress(uncompressedDataBuffer, 
compressedDataBuffer);
+
+    currentOffset += compressed.remaining();
+    offsetValueConverter.clear();
+    offsetValueConverter.putInt(currentOffset);
+    offsetValueConverter.flip();
+    Channels.writeFully(headerOut, offsetValueConverter);
+    Channels.writeFully(valuesOut, compressed);
+    uncompressedDataBuffer.clear();
+    numBlocks++;
+    if (numBlocks < 0) {

Review Comment:
   How does this happen? Under what conditions could `numBlocks` actually overflow to a negative value?



##########
processing/src/main/java/org/apache/druid/query/expression/NestedDataExpressions.java:
##########
@@ -0,0 +1,669 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.apache.druid.query.expression;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.jackson.DefaultObjectMapper;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.math.expr.Expr;
+import org.apache.druid.math.expr.ExprEval;
+import org.apache.druid.math.expr.ExprMacroTable;
+import org.apache.druid.math.expr.ExprType;
+import org.apache.druid.math.expr.ExpressionType;
+import org.apache.druid.segment.nested.NestedDataComplexTypeSerde;
+import org.apache.druid.segment.nested.NestedPathFinder;
+import org.apache.druid.segment.nested.NestedPathPart;
+import org.apache.druid.segment.nested.StructuredData;
+import org.apache.druid.segment.nested.StructuredDataProcessor;
+
+import javax.annotation.Nullable;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class NestedDataExpressions
+{
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
+
+  public static final ExpressionType TYPE = Preconditions.checkNotNull(
+      ExpressionType.fromColumnType(NestedDataComplexTypeSerde.TYPE)
+  );
+
+  public static class StructExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "struct";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      Preconditions.checkArgument(args.size() % 2 == 0);
+      class StructExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public StructExpr(List<Expr> args)
+        {
+          super(NAME, args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          HashMap<String, Object> theMap = new HashMap<>();
+          for (int i = 0; i < args.size(); i += 2) {
+            ExprEval field = args.get(i).eval(bindings);
+            ExprEval value = args.get(i + 1).eval(bindings);
+
+            Preconditions.checkArgument(field.type().is(ExprType.STRING), 
"field name must be a STRING");
+            theMap.put(field.asString(), value.value());
+          }
+
+          return ExprEval.ofComplex(TYPE, theMap);
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new StructExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return TYPE;
+        }
+      }
+      return new StructExpr(args);
+    }
+  }
+
+  public static class JsonObjectExprMacro extends StructExprMacro
+  {
+    public static final String NAME = "json_object";
+    
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+  }
+
+  public static class ToJsonExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "to_json";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      class ToJsonExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public ToJsonExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.ofComplex(
+              TYPE,
+              maybeUnwrapStructuredData(input)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new ToJsonExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return TYPE;
+        }
+      }
+      return new ToJsonExpr(args);
+    }
+  }
+
+  public static class ToJsonStringExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "to_json_string";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      class ToJsonStringExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public ToJsonStringExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          try {
+            final Object unwrapped = maybeUnwrapStructuredData(input);
+            final String stringify = unwrapped == null ? null : 
JSON_MAPPER.writeValueAsString(unwrapped);
+            return ExprEval.ofType(
+                ExpressionType.STRING,
+                stringify
+            );
+          }
+          catch (JsonProcessingException e) {
+            throw new IAE(e, "Unable to stringify [%s] to JSON", 
input.value());
+          }
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new ToJsonStringExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return ExpressionType.STRING;
+        }
+      }
+      return new ToJsonStringExpr(args);
+    }
+  }
+
+  public static class ParseJsonExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "parse_json";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      class ParseJsonExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public ParseJsonExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval arg = args.get(0).eval(bindings);
+          Object parsed = maybeUnwrapStructuredData(arg);
+          if (arg.type().is(ExprType.STRING) && arg.value() != null && 
maybeJson(arg.asString())) {
+            try {
+              parsed = JSON_MAPPER.readValue(arg.asString(), Object.class);
+            }
+            catch (JsonProcessingException e) {
+              throw new IAE("Bad string input [%s] to [%s]", arg.asString(), 
name());
+            }
+          }
+          return ExprEval.ofComplex(
+              TYPE,
+              parsed
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new ParseJsonExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return TYPE;
+        }
+      }
+      return new ParseJsonExpr(args);
+    }
+  }
+
+
+
+  public static class GetPathExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "get_path";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final List<NestedPathPart> parts = getArg1PathPartsFromLiteral(name(), 
args);
+      class GetPathExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public GetPathExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.bestEffortOf(
+              NestedPathFinder.findLiteral(maybeUnwrapStructuredData(input), 
parts)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new GetPathExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          // we cannot infer the output type (well we could say it is 'STRING' 
right now because is all we support...
+          return null;
+        }
+      }
+      return new GetPathExpr(args);
+    }
+  }
+
+  public static class JsonQueryExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "json_query";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final List<NestedPathPart> parts = 
getArg1JsonPathPartsFromLiteral(name(), args);
+      class JsonQueryExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public JsonQueryExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.ofComplex(
+              TYPE,
+              NestedPathFinder.find(maybeUnwrapStructuredData(input), parts)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new JsonQueryExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          // call all the output JSON typed
+          return TYPE;
+        }
+      }
+      return new JsonQueryExpr(args);
+    }
+  }
+
+  public static class JsonValueExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "json_value";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final List<NestedPathPart> parts = 
getArg1JsonPathPartsFromLiteral(name(), args);
+      class JsonValueExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public JsonValueExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.bestEffortOf(
+              NestedPathFinder.findLiteral(maybeUnwrapStructuredData(input), 
parts)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new JsonValueExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          // we cannot infer the output type (well we could say it is 'STRING' 
right now because is all we support...
+          return null;
+        }
+      }
+      return new JsonValueExpr(args);
+    }
+  }
+
+  public static class ListPathsExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "list_paths";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final StructuredDataProcessor processor = new StructuredDataProcessor()
+      {
+        @Override
+        public int processLiteralField(String fieldName, Object fieldValue)

Review Comment:
   This method needs Javadoc. What is the significance of the return value here?



##########
processing/src/main/java/org/apache/druid/segment/data/CompressedBlockReader.java:
##########
@@ -0,0 +1,248 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.segment.data;
+
+import com.google.common.base.Preconditions;
+import org.apache.druid.collections.ResourceHolder;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.java.util.common.io.Closer;
+import org.apache.druid.segment.CompressedPools;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.IntBuffer;
+import java.util.function.Supplier;
+
+/**
+ * Reader for a virtual contiguous address range backed by compressed blocks 
of data.
+ *
+ * Format:
+ * | version (byte) | compression (byte) | num blocks (int) | block size (int) 
| end offsets | compressed data |
+ *
+ * This mechanism supports two modes of use, the first where callers may ask 
for a range of data from the underlying
+ * blocks, provided by {@link #getRange(long, int)}. The {@link ByteBuffer} 
provided by this method may or may not
+ * be valid after additional calls to {@link #getRange(long, int)} or calls to 
{@link #seekBlock(int)}.
+ *
+ * For fixed width values which are aligned with the block size, callers may 
also use the method
+ * {@link #getDecompressedDataBuffer()} to have direct access to the current 
uncompressed block, and use the methods
+ * {@link #loadBlock(long)} to load the correct block and translate a virtual 
offset into the relative offset, or
+ * {@link #seekBlock(int)} to change which block is currently loaded.
+ *
+ * {@link #getRange(long, int)} uses these same mechanisms internally to 
supply data.
+ */
+public final class CompressedBlockReader implements Closeable
+{
+  private static final ByteBuffer NULL_VALUE = ByteBuffer.wrap(new byte[0]);
+  public static final byte VERSION = 0x01;
+
+  public static Supplier<CompressedBlockReader> fromByteBuffer(ByteBuffer 
buffer, ByteOrder byteOrder)
+  {
+    byte versionFromBuffer = buffer.get();
+
+    if (versionFromBuffer == VERSION) {
+      final CompressionStrategy compression = 
CompressionStrategy.forId(buffer.get());
+      final int blockSize = buffer.getInt();
+      assert CompressedPools.BUFFER_SIZE == blockSize;
+      Preconditions.checkState(
+          blockSize <= CompressedPools.BUFFER_SIZE,
+          "Maximum block size must be less than " + CompressedPools.BUFFER_SIZE
+      );
+      final int numBlocks = buffer.getInt();
+      final int offsetsSize = numBlocks * Integer.BYTES;
+      // buffer is at start of ending offsets
+      final ByteBuffer offsets = buffer.asReadOnlyBuffer().order(byteOrder);
+      offsets.limit(offsets.position() + offsetsSize);
+      final IntBuffer offsetView = 
offsets.slice().order(byteOrder).asIntBuffer();
+      final int compressedSize = offsetView.get(numBlocks - 1);
+
+      // move to start of compressed data
+      buffer.position(buffer.position() + offsetsSize);
+      final ByteBuffer compressedData = 
buffer.asReadOnlyBuffer().order(byteOrder);
+      compressedData.limit(compressedData.position() + compressedSize);
+      buffer.position(buffer.position() + compressedSize);
+
+      final ByteBuffer compressedDataView = 
compressedData.slice().order(byteOrder);
+      return () -> new CompressedBlockReader(
+          compression,
+          numBlocks,
+          blockSize,
+          offsetView.asReadOnlyBuffer(),
+          compressedDataView.asReadOnlyBuffer().order(byteOrder),
+          byteOrder
+      );
+    }
+    throw new IAE("Unknown version[%s]", versionFromBuffer);
+  }
+
+  private final CompressionStrategy.Decompressor decompressor;
+
+  private final int numBlocks;
+  private final int div;
+  private final int rem;
+  private final IntBuffer endOffsetsBuffer;
+  private final ByteBuffer compressedDataBuffer;
+
+  private final ResourceHolder<ByteBuffer> decompressedDataBufferHolder;
+  private final ByteBuffer decompressedDataBuffer;
+
+  private final ByteOrder byteOrder;
+  private final Closer closer;
+  private int currentBlockNumber = -1;
+
+  public CompressedBlockReader(
+      CompressionStrategy compressionStrategy,
+      int numBlocks,
+      int blockSize,
+      IntBuffer endOffsetsBuffer,
+      ByteBuffer compressedDataBuffer,
+      ByteOrder byteOrder
+  )
+  {
+    this.decompressor = compressionStrategy.getDecompressor();
+    this.numBlocks = numBlocks;
+    this.div = Integer.numberOfTrailingZeros(blockSize);
+    this.rem = blockSize - 1;
+    this.endOffsetsBuffer = endOffsetsBuffer;
+    this.compressedDataBuffer = compressedDataBuffer;
+    this.closer = Closer.create();
+    this.decompressedDataBufferHolder = CompressedPools.getByteBuf(byteOrder);
+    closer.register(decompressedDataBufferHolder);
+    this.decompressedDataBuffer = decompressedDataBufferHolder.get();
+    this.decompressedDataBuffer.clear();
+    this.byteOrder = byteOrder;
+  }
+
+  /**
+   * Get size in bytes of virtual contiguous buffer
+   */
+  public long getSize()

Review Comment:
   That will be of help, since at line #75 it says `final int compressedSize = offsetView.get(numBlocks - 1);`.



##########
processing/src/main/java/org/apache/druid/query/expression/NestedDataExpressions.java:
##########
@@ -0,0 +1,669 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.apache.druid.query.expression;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import org.apache.druid.jackson.DefaultObjectMapper;
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.math.expr.Expr;
+import org.apache.druid.math.expr.ExprEval;
+import org.apache.druid.math.expr.ExprMacroTable;
+import org.apache.druid.math.expr.ExprType;
+import org.apache.druid.math.expr.ExpressionType;
+import org.apache.druid.segment.nested.NestedDataComplexTypeSerde;
+import org.apache.druid.segment.nested.NestedPathFinder;
+import org.apache.druid.segment.nested.NestedPathPart;
+import org.apache.druid.segment.nested.StructuredData;
+import org.apache.druid.segment.nested.StructuredDataProcessor;
+
+import javax.annotation.Nullable;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class NestedDataExpressions
+{
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
+
+  public static final ExpressionType TYPE = Preconditions.checkNotNull(
+      ExpressionType.fromColumnType(NestedDataComplexTypeSerde.TYPE)
+  );
+
+  public static class StructExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "struct";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      Preconditions.checkArgument(args.size() % 2 == 0);
+      class StructExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public StructExpr(List<Expr> args)
+        {
+          super(NAME, args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          HashMap<String, Object> theMap = new HashMap<>();
+          for (int i = 0; i < args.size(); i += 2) {
+            ExprEval field = args.get(i).eval(bindings);
+            ExprEval value = args.get(i + 1).eval(bindings);
+
+            Preconditions.checkArgument(field.type().is(ExprType.STRING), 
"field name must be a STRING");
+            theMap.put(field.asString(), value.value());
+          }
+
+          return ExprEval.ofComplex(TYPE, theMap);
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new StructExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return TYPE;
+        }
+      }
+      return new StructExpr(args);
+    }
+  }
+
/**
 * Alias of {@link StructExprMacro} registered under the SQL-standard-flavored name
 * {@code json_object}. All behavior is inherited from the parent macro.
 *
 * NOTE(review): the inherited apply() builds its expression with StructExprMacro.NAME
 * ("struct"), so expressions produced by this macro will report "struct" as their function
 * name — confirm whether that is intended.
 */
public static class JsonObjectExprMacro extends StructExprMacro
{
  public static final String NAME = "json_object";

  @Override
  public String name()
  {
    return NAME;
  }
}
+
+  public static class ToJsonExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "to_json";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      class ToJsonExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public ToJsonExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.ofComplex(
+              TYPE,
+              maybeUnwrapStructuredData(input)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new ToJsonExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return TYPE;
+        }
+      }
+      return new ToJsonExpr(args);
+    }
+  }
+
+  public static class ToJsonStringExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "to_json_string";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      class ToJsonStringExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public ToJsonStringExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          try {
+            final Object unwrapped = maybeUnwrapStructuredData(input);
+            final String stringify = unwrapped == null ? null : 
JSON_MAPPER.writeValueAsString(unwrapped);
+            return ExprEval.ofType(
+                ExpressionType.STRING,
+                stringify
+            );
+          }
+          catch (JsonProcessingException e) {
+            throw new IAE(e, "Unable to stringify [%s] to JSON", 
input.value());
+          }
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new ToJsonStringExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return ExpressionType.STRING;
+        }
+      }
+      return new ToJsonStringExpr(args);
+    }
+  }
+
+  public static class ParseJsonExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "parse_json";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      class ParseJsonExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public ParseJsonExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval arg = args.get(0).eval(bindings);
+          Object parsed = maybeUnwrapStructuredData(arg);
+          if (arg.type().is(ExprType.STRING) && arg.value() != null && 
maybeJson(arg.asString())) {
+            try {
+              parsed = JSON_MAPPER.readValue(arg.asString(), Object.class);
+            }
+            catch (JsonProcessingException e) {
+              throw new IAE("Bad string input [%s] to [%s]", arg.asString(), 
name());
+            }
+          }
+          return ExprEval.ofComplex(
+              TYPE,
+              parsed
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new ParseJsonExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return TYPE;
+        }
+      }
+      return new ParseJsonExpr(args);
+    }
+  }
+
+
+
+  public static class GetPathExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "get_path";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final List<NestedPathPart> parts = getArg1PathPartsFromLiteral(name(), 
args);
+      class GetPathExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public GetPathExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.bestEffortOf(
+              NestedPathFinder.findLiteral(maybeUnwrapStructuredData(input), 
parts)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new GetPathExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          // we cannot infer the output type (well we could say it is 'STRING' 
right now because is all we support...
+          return null;
+        }
+      }
+      return new GetPathExpr(args);
+    }
+  }
+
+  public static class JsonQueryExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "json_query";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final List<NestedPathPart> parts = 
getArg1JsonPathPartsFromLiteral(name(), args);
+      class JsonQueryExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public JsonQueryExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.ofComplex(
+              TYPE,
+              NestedPathFinder.find(maybeUnwrapStructuredData(input), parts)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new JsonQueryExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          // call all the output JSON typed
+          return TYPE;
+        }
+      }
+      return new JsonQueryExpr(args);
+    }
+  }
+
+  public static class JsonValueExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "json_value";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final List<NestedPathPart> parts = 
getArg1JsonPathPartsFromLiteral(name(), args);
+      class JsonValueExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public JsonValueExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          return ExprEval.bestEffortOf(
+              NestedPathFinder.findLiteral(maybeUnwrapStructuredData(input), 
parts)
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new JsonValueExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          // we cannot infer the output type (well we could say it is 'STRING' 
right now because is all we support...
+          return null;
+        }
+      }
+      return new JsonValueExpr(args);
+    }
+  }
+
+  public static class ListPathsExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "list_paths";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final StructuredDataProcessor processor = new StructuredDataProcessor()
+      {
+        @Override
+        public int processLiteralField(String fieldName, Object fieldValue)
+        {
+          // do nothing, we only want the list of fields returned by this 
processor
+          return 0;
+        }
+      };
+
+      class ListPathsExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public ListPathsExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          StructuredDataProcessor.ProcessResults info = 
processor.processFields(maybeUnwrapStructuredData(input));
+          return ExprEval.ofType(
+              ExpressionType.STRING_ARRAY,
+              ImmutableList.copyOf(info.getLiteralFields())
+          );
+        }
+
+        @Override
+        public Expr visit(Shuttle shuttle)
+        {
+          List<Expr> newArgs = args.stream().map(x -> 
x.visit(shuttle)).collect(Collectors.toList());
+          return shuttle.visit(new ListPathsExpr(newArgs));
+        }
+
+        @Nullable
+        @Override
+        public ExpressionType getOutputType(InputBindingInspector inspector)
+        {
+          return ExpressionType.STRING_ARRAY;
+        }
+      }
+      return new ListPathsExpr(args);
+    }
+  }
+
+  public static class JsonPathsExprMacro implements ExprMacroTable.ExprMacro
+  {
+    public static final String NAME = "json_paths";
+
+    @Override
+    public String name()
+    {
+      return NAME;
+    }
+
+    @Override
+    public Expr apply(List<Expr> args)
+    {
+      final StructuredDataProcessor processor = new StructuredDataProcessor()
+      {
+        @Override
+        public int processLiteralField(String fieldName, Object fieldValue)
+        {
+          // do nothing, we only want the list of fields returned by this 
processor
+          return 0;
+        }
+      };
+
+      class JsonPathsExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
+      {
+        public JsonPathsExpr(List<Expr> args)
+        {
+          super(name(), args);
+        }
+
+        @Override
+        public ExprEval eval(ObjectBinding bindings)
+        {
+          ExprEval input = args.get(0).eval(bindings);
+          // maybe in the future ProcessResults should deal in 
PathFinder.PathPart instead of strings for fields
+          StructuredDataProcessor.ProcessResults info = 
processor.processFields(maybeUnwrapStructuredData(input));
+          List<String> transformed = info.getLiteralFields()

Review Comment:
   have you thought about caching some results here? For example, many literal 
fields are going to be repeated across input records and `parseJqPath` doesn't 
seem cheap.  



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to