[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982099#comment-14982099
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43479756
  
--- Diff: exec/java-exec/src/main/codegen/templates/UnionVector.java ---
@@ -0,0 +1,479 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.drill.common.types.TypeProtos.MinorType;
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile 
name="/org/apache/drill/exec/vector/complex/impl/UnionVector.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+import java.util.Iterator;
+import org.apache.drill.exec.vector.complex.impl.ComplexCopier;
+import org.apache.drill.exec.util.CallBack;
+
+/*
+ * This class is generated using freemarker and the ${.template_name} 
template.
+ */
+@SuppressWarnings("unused")
+
+
+public class UnionVector implements ValueVector {
+
+  private MaterializedField field;
+  private BufferAllocator allocator;
+  private Accessor accessor = new Accessor();
+  private Mutator mutator = new Mutator();
+  private int valueCount;
+
+  private MapVector internalMap;
+  private SingleMapWriter internalMapWriter;
+  private UInt1Vector typeVector;
+
+  private MapVector mapVector;
+  private ListVector listVector;
+  private NullableBigIntVector bigInt;
+  private NullableVarCharVector varChar;
+
+  private FieldReader reader;
+  private NullableBitVector bit;
+
+  private State state = State.INIT;
+  private int singleType = 0;
+  private ValueVector singleVector;
+  private MajorType majorType;
+
+  private final CallBack callBack;
+
+  private enum State {
+    INIT, SINGLE, MULTI
+  }
+
+  public UnionVector(MaterializedField field, BufferAllocator allocator, CallBack callBack) {
+    this.field = field.clone();
+    this.allocator = allocator;
+    internalMap = new MapVector("internal", allocator, callBack);
+    internalMapWriter = new SingleMapWriter(internalMap, null, true, true);
+    this.typeVector = internalMap.addOrGet("types", Types.required(MinorType.UINT1), UInt1Vector.class);
+    this.field.addChild(internalMap.getField().clone());
+    this.majorType = field.getType();
+    this.callBack = callBack;
+  }
+
+  private void updateState(ValueVector v) {
+    if (state == State.INIT) {
+      state = State.SINGLE;
+      singleVector = v;
+      singleType = v.getField().getType().getMinorType().getNumber();
+    } else {
+      state = State.MULTI;
+      singleVector = null;
+    }
+  }
+
+  public List getSubTypes() {
+    return majorType.getSubTypeList();
+  }
+
+  private void addSubType(MinorType type) {
+    majorType = MajorType.newBuilder(this.majorType).addSubType(type).build();
+    if (callBack != null) {
+      callBack.doWork();
+    }
+  }
+
+  public boolean isSingleType() {
+    return state == State.SINGLE && singleType != MinorType.LIST_VALUE;
+  }
+
+  public ValueVector getSingleVector() {
+    assert state != State.MULTI : "Cannot get single vector when there are multiple types";
--- End diff --

This State was originally designed to eliminate some of the overhead when 
there was only a single subtype, but with PromotableWriter, this is really not 
necessary, and this State is no longer used. So I removed this code.
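For illustration, the reason the single-type fast path becomes redundant can be sketched as follows. This is purely conceptual: the class and fields below are hypothetical and do not reflect PromotableWriter's actual API; the point is only that promotion happens lazily at the writer level, so the vector no longer needs a SINGLE/MULTI state of its own.

```
// Conceptual sketch only -- PromotionSketch is hypothetical, not Drill's PromotableWriter.
// The writer starts out backed by a single concrete type and promotes itself to a
// union the first time a second type appears, so no per-vector state flag is needed.
class PromotionSketch {
  private Class<?> currentType;     // type of the single backing vector, if any
  private boolean promotedToUnion;  // true once a second type has been seen

  void write(Object value) {
    if (currentType == null) {
      currentType = value.getClass();            // cheap single-type path
    } else if (!promotedToUnion && !currentType.isInstance(value)) {
      promotedToUnion = true;                    // promote once; no special-casing afterwards
    }
    // ... delegate the actual write to the current (single-type or union) writer ...
  }
}
```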


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  

[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982095#comment-14982095
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43479369
  
--- Diff: exec/java-exec/src/main/codegen/templates/UnionFunctions.java ---
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile 
name="/org/apache/drill/exec/expr/fn/impl/GUnionFunctions.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.expr.fn.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import 
org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import javax.inject.Inject;
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.record.RecordBatch;
+
+/*
+ * This class is generated using freemarker and the ${.template_name} 
template.
+ */
+
+@SuppressWarnings("unused")
+public class GUnionFunctions {
+
+  <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first />
+  <#assign fields = minor.fields!type.fields />
+  <#assign uncappedName = name?uncap_first/>
+
+  <#if !minor.class?starts_with("Decimal")>
+
+  @SuppressWarnings("unused")
+  @FunctionTemplate(name = "is_${name?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.INTERNAL)
+  public static class UnionIs${name} implements DrillSimpleFunc {
+
+    @Param UnionHolder in;
+    @Output BitHolder out;
+
+    public void setup() {}
+
+    public void eval() {
+      if (in.isSet == 1) {
+        out.value = in.getType().getMinorType() == org.apache.drill.common.types.TypeProtos.MinorType.${name?upper_case} ? 1 : 0;
+      } else {
+        out.value = 0;
+      }
+    }
+  }
+
+  @SuppressWarnings("unused")
+  @FunctionTemplate(name = "assert_${name?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.INTERNAL)
+  public static class CastUnion${name} implements DrillSimpleFunc {
+
+    @Param UnionHolder in;
+    @Output Nullable${name}Holder out;
+
+    public void setup() {}
+
+    public void eval() {
+      if (in.isSet == 1) {
+        in.reader.read(out);
--- End diff --

It throws a message like:
"You tried to read a [%s] type when you are using a field reader of type [%s]."

I think these functions are internal and not for general use, so this type of message is probably fine.
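For reference, after FreeMarker substitution the assert function in the template above expands, for the BIGINT case, to roughly the following. This is a sketch based only on the quoted template; the generated class may differ in details, and the else branch shown here is an assumption (the quoted diff is truncated before it).

```
// Approximate expansion of the template above for ${name} = BigInt (illustrative only).
@SuppressWarnings("unused")
@FunctionTemplate(name = "assert_BIGINT", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.INTERNAL)
public static class CastUnionBigInt implements DrillSimpleFunc {

  @Param UnionHolder in;
  @Output NullableBigIntHolder out;

  public void setup() {}

  public void eval() {
    if (in.isSet == 1) {
      in.reader.read(out);   // throws the message above if the union currently holds a non-BIGINT value
    } else {
      out.isSet = 0;         // assumption: a null input propagates as a null output
    }
  }
}
```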


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982088#comment-14982088
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43479065
  
--- Diff: exec/java-exec/src/main/codegen/templates/MapWriters.java ---
@@ -52,9 +52,18 @@
   private final Map fields = Maps.newHashMap();
   <#if mode == "Repeated">private int currentChildIndex = 0;
 
-  public ${mode}MapWriter(${containerClass} container, FieldWriter parent) {
+  private final boolean unionEnabled;
+  private final boolean unionInternalMap;
--- End diff --

It's actually not needed; it's a relic of an older version, so I removed it.


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982180#comment-14982180
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43482198
  
--- Diff: exec/java-exec/src/main/codegen/templates/UnionVector.java ---
@@ -0,0 +1,479 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.drill.common.types.TypeProtos.MinorType;
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile 
name="/org/apache/drill/exec/vector/complex/impl/UnionVector.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+import java.util.Iterator;
+import org.apache.drill.exec.vector.complex.impl.ComplexCopier;
+import org.apache.drill.exec.util.CallBack;
+
+/*
+ * This class is generated using freemarker and the ${.template_name} 
template.
+ */
+@SuppressWarnings("unused")
+
+
+public class UnionVector implements ValueVector {
+
+  private MaterializedField field;
+  private BufferAllocator allocator;
+  private Accessor accessor = new Accessor();
+  private Mutator mutator = new Mutator();
+  private int valueCount;
+
+  private MapVector internalMap;
+  private SingleMapWriter internalMapWriter;
+  private UInt1Vector typeVector;
+
+  private MapVector mapVector;
+  private ListVector listVector;
+  private NullableBigIntVector bigInt;
+  private NullableVarCharVector varChar;
+
+  private FieldReader reader;
+  private NullableBitVector bit;
+
+  private State state = State.INIT;
+  private int singleType = 0;
+  private ValueVector singleVector;
+  private MajorType majorType;
+
+  private final CallBack callBack;
+
+  private enum State {
+INIT, SINGLE, MULTI
+  }
+
+  public UnionVector(MaterializedField field, BufferAllocator allocator, 
CallBack callBack) {
+this.field = field.clone();
+this.allocator = allocator;
+internalMap = new MapVector("internal", allocator, callBack);
+internalMapWriter = new SingleMapWriter(internalMap, null, true, true);
+this.typeVector = internalMap.addOrGet("types", 
Types.required(MinorType.UINT1), UInt1Vector.class);
+this.field.addChild(internalMap.getField().clone());
+this.majorType = field.getType();
+this.callBack = callBack;
+  }
+
+  private void updateState(ValueVector v) {
+if (state == State.INIT) {
+  state = State.SINGLE;
+  singleVector = v;
+  singleType = v.getField().getType().getMinorType().getNumber();
+} else {
+  state = State.MULTI;
+  singleVector = null;
+}
+  }
+
+  public List getSubTypes() {
+return majorType.getSubTypeList();
+  }
+
+  private void addSubType(MinorType type) {
+majorType =  
MajorType.newBuilder(this.majorType).addSubType(type).build();
+if (callBack != null) {
+  callBack.doWork();
+}
+  }
+
+  public boolean isSingleType() {
+return state == State.SINGLE && singleType != MinorType.LIST_VALUE;
+  }
+
+  public ValueVector getSingleVector() {
+assert state != State.MULTI : "Cannot get single vector when there are 
multiple types";
+assert state != State.INIT : "Cannot get single vector when there are 
no types";
+return singleVector;
+  }
+
+  private static final MajorType MAP_TYPE = Types.optional(MinorType.MAP);
+
+  public MapVector getMap() {
+if (mapVector == null) {
+  int vectorCount = internalMap.size();
+  mapVector = internalMap.addOrGet("map", MAP_TYPE, MapVector.class);
+  updateState(mapVector);
+  addSubType(MinorType.MAP);
+  if 

[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982089#comment-14982089
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43479145
  
--- Diff: exec/java-exec/src/main/codegen/templates/NullReader.java ---
@@ -56,11 +56,17 @@ public void copyAsValue(MapWriter writer) {}
 
   public void copyAsValue(ListWriter writer) {}
 
-  <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first />
+  public void copyAsValue(UnionWriter writer) {}
+
+  <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first />
+  public void read(${name}Holder holder){
+    throw new UnsupportedOperationException("NullReader cannot read into non-nullable holder");
--- End diff --

This is an internal error message, not the result of a potential user error.


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982121#comment-14982121
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43480218
  
--- Diff: exec/java-exec/src/main/codegen/templates/UnionVector.java ---
@@ -0,0 +1,479 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.drill.common.types.TypeProtos.MinorType;
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile 
name="/org/apache/drill/exec/vector/complex/impl/UnionVector.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+import java.util.Iterator;
+import org.apache.drill.exec.vector.complex.impl.ComplexCopier;
+import org.apache.drill.exec.util.CallBack;
+
+/*
+ * This class is generated using freemarker and the ${.template_name} 
template.
+ */
+@SuppressWarnings("unused")
+
+
+public class UnionVector implements ValueVector {
+
+  private MaterializedField field;
+  private BufferAllocator allocator;
+  private Accessor accessor = new Accessor();
+  private Mutator mutator = new Mutator();
+  private int valueCount;
+
+  private MapVector internalMap;
+  private SingleMapWriter internalMapWriter;
+  private UInt1Vector typeVector;
+
+  private MapVector mapVector;
+  private ListVector listVector;
+  private NullableBigIntVector bigInt;
+  private NullableVarCharVector varChar;
+
+  private FieldReader reader;
+  private NullableBitVector bit;
+
+  private State state = State.INIT;
+  private int singleType = 0;
+  private ValueVector singleVector;
+  private MajorType majorType;
+
+  private final CallBack callBack;
+
+  private enum State {
+INIT, SINGLE, MULTI
+  }
+
+  public UnionVector(MaterializedField field, BufferAllocator allocator, 
CallBack callBack) {
+this.field = field.clone();
+this.allocator = allocator;
+internalMap = new MapVector("internal", allocator, callBack);
+internalMapWriter = new SingleMapWriter(internalMap, null, true, true);
+this.typeVector = internalMap.addOrGet("types", 
Types.required(MinorType.UINT1), UInt1Vector.class);
+this.field.addChild(internalMap.getField().clone());
+this.majorType = field.getType();
+this.callBack = callBack;
+  }
+
+  private void updateState(ValueVector v) {
+if (state == State.INIT) {
+  state = State.SINGLE;
+  singleVector = v;
+  singleType = v.getField().getType().getMinorType().getNumber();
+} else {
+  state = State.MULTI;
+  singleVector = null;
+}
+  }
+
+  public List getSubTypes() {
+return majorType.getSubTypeList();
+  }
+
+  private void addSubType(MinorType type) {
+majorType =  
MajorType.newBuilder(this.majorType).addSubType(type).build();
+if (callBack != null) {
+  callBack.doWork();
+}
+  }
+
+  public boolean isSingleType() {
+return state == State.SINGLE && singleType != MinorType.LIST_VALUE;
+  }
+
+  public ValueVector getSingleVector() {
+assert state != State.MULTI : "Cannot get single vector when there are 
multiple types";
+assert state != State.INIT : "Cannot get single vector when there are 
no types";
+return singleVector;
+  }
+
+  private static final MajorType MAP_TYPE = Types.optional(MinorType.MAP);
+
+  public MapVector getMap() {
+if (mapVector == null) {
+  int vectorCount = internalMap.size();
+  mapVector = internalMap.addOrGet("map", MAP_TYPE, MapVector.class);
+  updateState(mapVector);
+  addSubType(MinorType.MAP);
+  if 

[jira] [Commented] (DRILL-3963) Read raw key value bytes from sequence files

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3963?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983445#comment-14983445
 ] 

ASF GitHub Bot commented on DRILL-3963:
---

Github user amithadke commented on the pull request:

https://github.com/apache/drill/pull/214#issuecomment-152661547
  
Jacques was having a similar issue. I'm trying to reproduce it, but I'm seeing something different:

Test set: org.apache.drill.exec.impersonation.TestImpersonationQueries

---
Tests run: 9, Failures: 0, Errors: 2, Skipped: 0, Time elapsed: 70.825 sec 
<<< FAILURE! - in org.apache.drill.exec.impersonation.TestImpersonationQueries

testDirectImpersonation_HasGroupReadPermissions(org.apache.drill.exec.impersonation.TestImpersonationQueries)
  Time elapsed: 0.11 sec  <<< ERROR!
org.apache.drill.common.exceptions.UserRemoteException: SYSTEM ERROR: 
OutOfMemoryError: unable to create new native thread


[Error Id: 1f1822ed-903b-4b45-9744-cc914688255f on 192.168.10.206:31013]
at 
org.apache.drill.exec.rpc.user.QueryResultHandler.resultArrived(QueryResultHandler.java:118)
at 
org.apache.drill.exec.rpc.user.UserClient.handleReponse(UserClient.java:110)
at 
org.apache.drill.exec.rpc.BasicClientWithConnection.handle(BasicClientWithConnection.java:47)
at 
org.apache.drill.exec.rpc.BasicClientWithConnection.handle(BasicClientWithConnection.java:32)
at org.apache.drill.exec.rpc.RpcBus.handle(RpcBus.java:61)
at 
org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:233)
at 
org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:205)
at 
io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:89)
at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:254)
at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:242)
at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86)
at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:847)
at 
io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
at 
io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
at 
io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
at 
io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
at 
io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
at java.lang.Thread.run(Thread.java:745)




> Read raw key value bytes from sequence files
> 
>
> Key: DRILL-3963
> URL: https://issues.apache.org/jira/browse/DRILL-3963
> Project: Apache Drill
>  Issue Type: New Feature
>Reporter: amit hadke
>Assignee: amit hadke
>
> Sequence files store list of key-value pairs. Keys/values are of type hadoop 
> writable.
> Provide a format plugin that reads raw bytes out of sequence files which can 
> be further deserialized by a udf(from hadoop writable -> drill type)



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983603#comment-14983603
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43563065
  
--- Diff: 
exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
 ---
@@ -323,23 +454,50 @@ public LogicalExpression visitIfExpression(IfExpression ifExpr, FunctionLookupCo
 
       MinorType thenType = conditions.expression.getMajorType().getMinorType();
       MinorType elseType = newElseExpr.getMajorType().getMinorType();
-
-      // Check if we need a cast
-      if (thenType != elseType && !(thenType == MinorType.NULL || elseType == MinorType.NULL)) {
-
-        MinorType leastRestrictive = TypeCastRules.getLeastRestrictiveType((Arrays.asList(thenType, elseType)));
-        if (leastRestrictive != thenType) {
-          // Implicitly cast the then expression
+      boolean hasUnion = thenType == MinorType.UNION || elseType == MinorType.UNION;
+      if (unionTypeEnabled) {
+        if (thenType != elseType && !(thenType == MinorType.NULL || elseType == MinorType.NULL)) {
+
+          MinorType leastRestrictive = MinorType.UNION;
+          MajorType.Builder builder = MajorType.newBuilder().setMinorType(MinorType.UNION).setMode(DataMode.OPTIONAL);
+          if (thenType == MinorType.UNION) {
+            for (MinorType subType : conditions.expression.getMajorType().getSubTypeList()) {
+              builder.addSubType(subType);
+            }
+          } else {
+            builder.addSubType(thenType);
+          }
+          if (elseType == MinorType.UNION) {
+            for (MinorType subType : newElseExpr.getMajorType().getSubTypeList()) {
+              builder.addSubType(subType);
+            }
+          } else {
+            builder.addSubType(elseType);
+          }
+          MajorType unionType = builder.build();
           conditions = new IfExpression.IfCondition(newCondition,
-              addCastExpression(conditions.expression, newElseExpr.getMajorType(), functionLookupContext, errorCollector));
-        } else if (leastRestrictive != elseType) {
-          // Implicitly cast the else expression
-          newElseExpr = addCastExpression(newElseExpr, conditions.expression.getMajorType(), functionLookupContext, errorCollector);
-        } else {
-          /* Cannot cast one of the two expressions to make the output type of if and else expression
-           * to be the same. Raise error.
-           */
-          throw new DrillRuntimeException("Case expression should have similar output type on all its branches");
+              addCastExpression(conditions.expression, unionType, functionLookupContext, errorCollector));
+          newElseExpr = addCastExpression(newElseExpr, unionType, functionLookupContext, errorCollector);
+        }
+
+      } else {
+        // Check if we need a cast
+        if (thenType != elseType && !(thenType == MinorType.NULL || elseType == MinorType.NULL)) {
+
+          MinorType leastRestrictive = TypeCastRules.getLeastRestrictiveType((Arrays.asList(thenType, elseType)));
+          if (leastRestrictive != thenType) {
+            // Implicitly cast the then expression
+            conditions = new IfExpression.IfCondition(newCondition,
+                addCastExpression(conditions.expression, newElseExpr.getMajorType(), functionLookupContext, errorCollector));
+          } else if (leastRestrictive != elseType) {
+            // Implicitly cast the else expression
+            newElseExpr = addCastExpression(newElseExpr, conditions.expression.getMajorType(), functionLookupContext, errorCollector);
+          } else {
+            /* Cannot cast one of the two expressions to make the output type of if and else expression
+             * to be the same. Raise error.
+             */
+            throw new DrillRuntimeException("Case expression should have similar output type on all its branches");
--- End diff --

I don't think this is a problem. In this case, the case statement will not coerce to a type unless one of the branches' output is that type. So as long as there is no Union type in the input, there won't be a Union in the output.
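A compact way to read the union branch of the diff above: when the THEN and ELSE branches have different types, the materializer builds a single OPTIONAL UNION type whose subtypes are the union of both branches' types, then casts both branches to it. The standalone helper below restates that merge; it is a hypothetical refactoring for illustration, not the actual method in ExpressionTreeMaterializer, but every API call in it appears in the diff.

```
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;

// Hypothetical helper illustrating the subtype merge performed inline in the diff above.
public class UnionTypeMergeSketch {

  static MajorType mergeToUnion(MajorType thenType, MajorType elseType) {
    MajorType.Builder builder = MajorType.newBuilder()
        .setMinorType(MinorType.UNION)
        .setMode(DataMode.OPTIONAL);
    addSubTypes(builder, thenType);
    addSubTypes(builder, elseType);
    return builder.build();
  }

  // If a branch is already a UNION, carry over its subtypes; otherwise add the branch's own type.
  static void addSubTypes(MajorType.Builder builder, MajorType branchType) {
    if (branchType.getMinorType() == MinorType.UNION) {
      for (MinorType subType : branchType.getSubTypeList()) {
        builder.addSubType(subType);
      }
    } else {
      builder.addSubType(branchType.getMinorType());
    }
  }
}
```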


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution 

[jira] [Commented] (DRILL-3994) Build Fails on Windows after DRILL-3742

2015-10-30 Thread Julien Le Dem (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3994?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983615#comment-14983615
 ] 

Julien Le Dem commented on DRILL-3994:
--

I'm looking into it. Currently installing Windows.
It looks like the problem is at the following line:
https://github.com/apache/drill/blob/e4b94a78487f844be4fe71c4b9bf88b16c7f42f7/common/src/main/java/org/apache/drill/common/scanner/BuildTimeScan.java#L126
{noformat}
URL url = new URL("file:" + basePath);
Set markedPaths = ClassPathScanner.getMarkedPaths();
if (!markedPaths.contains(url)) {
  throw new IllegalArgumentException(url + " not in " + markedPaths);
}
{noformat}
where url is of the form file:C:/... but getMarkedPaths() returns URLs of the form file:/C:/... (note the extra leading slash).
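One standard way to avoid this mismatch is to stop building the URL by string concatenation and let java.io.File/URI do it, which inserts the leading slash on Windows drive paths. This is only a sketch of that idea, not necessarily what the eventual fix does:

{code}
import java.io.File;
import java.net.URL;

// Sketch only: on Windows, File/URI-based construction yields "file:/C:/...",
// matching what ClassPathScanner.getMarkedPaths() returns, whereas string
// concatenation yields "file:C:/...".
public class UrlNormalizeSketch {
  public static void main(String[] args) throws Exception {
    String basePath = args.length > 0 ? args[0] : "C:/drill/common/target/classes/";

    URL concatenated = new URL("file:" + basePath);        // file:C:/...  (what the check saw)
    URL normalized = new File(basePath).toURI().toURL();   // file:/C:/... (what the marked-path set contains)

    System.out.println(concatenated);
    System.out.println(normalized);
  }
}
{code}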

> Build Fails on Windows after DRILL-3742
> ---
>
> Key: DRILL-3994
> URL: https://issues.apache.org/jira/browse/DRILL-3994
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Tools, Build & Test
>Reporter: Sudheesh Katkam
>Assignee: Julien Le Dem
>Priority: Critical
> Fix For: 1.3.0
>
>
> Build fails on Windows on the latest master:
> {code}
> c:\drill> mvn clean install -DskipTests 
> ...
> [INFO] Rat check: Summary of files. Unapproved: 0 unknown: 0 generated: 0 
> approved: 169 licence.
> [INFO] 
> [INFO] <<< exec-maven-plugin:1.2.1:java (default) < validate @ drill-common 
> <<<
> [INFO] 
> [INFO] --- exec-maven-plugin:1.2.1:java (default) @ drill-common ---
> SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
> SLF4J: Defaulting to no-operation (NOP) logger implementation
> SLF4J: See 
> http://www.slf4j.org/codes.html#StaticLoggerBinder
>  for further details.
> Scanning: C:\drill\common\target\classes
> [WARNING] 
> java.lang.reflect.InvocationTargetException
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:297)
>   at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.IllegalArgumentException: 
> file:C:/drill/common/target/classes/ not in 
> [file:/C:/drill/common/target/classes/]
>   at 
> org.apache.drill.common.scanner.BuildTimeScan.main(BuildTimeScan.java:129)
>   ... 6 more
> [INFO] 
> 
> [INFO] Reactor Summary:
> [INFO] 
> [INFO] Apache Drill Root POM .. SUCCESS [ 10.016 
> s]
> [INFO] tools/Parent Pom ... SUCCESS [  1.062 
> s]
> [INFO] tools/freemarker codegen tooling ... SUCCESS [  6.922 
> s]
> [INFO] Drill Protocol . SUCCESS [ 10.062 
> s]
> [INFO] Common (Logical Plan, Base expressions)  FAILURE [  9.954 
> s]
> [INFO] contrib/Parent Pom . SKIPPED
> [INFO] contrib/data/Parent Pom  SKIPPED
> [INFO] contrib/data/tpch-sample-data .. SKIPPED
> [INFO] exec/Parent Pom  SKIPPED
> [INFO] exec/Java Execution Engine . SKIPPED
> [INFO] exec/JDBC Driver using dependencies  SKIPPED
> [INFO] JDBC JAR with all dependencies . SKIPPED
> [INFO] contrib/mongo-storage-plugin ... SKIPPED
> [INFO] contrib/hbase-storage-plugin ... SKIPPED
> [INFO] contrib/jdbc-storage-plugin  SKIPPED
> [INFO] contrib/hive-storage-plugin/Parent Pom . SKIPPED
> [INFO] contrib/hive-storage-plugin/hive-exec-shaded ... SKIPPED
> [INFO] contrib/hive-storage-plugin/core ... SKIPPED
> [INFO] contrib/drill-gis-plugin ... SKIPPED
> [INFO] Packaging and Distribution Assembly  SKIPPED
> [INFO] contrib/sqlline  SKIPPED
> [INFO] 
> 
> [INFO] BUILD FAILURE
> [INFO] 
> 
> [INFO] Total time: 38.813 s
> [INFO] Finished at: 2015-10-28T12:17:19-07:00
> [INFO] Final Memory: 67M/466M
> [INFO] 
> 
> [ERROR] Failed to execute goal org.codehaus.mojo:exec-maven-plugin:1.2.1:java 
> (default) on project drill-common: An exception occured while executing the 
> Java class. null: InvocationTargetException: 
> file:C:/drill/common/target/classes/ not in 
> 

[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983641#comment-14983641
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43563951
  
--- Diff: 
exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/UnionSqlAccessor.java
 ---
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.accessor;
+
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.impl.UnionVector;
+import org.apache.drill.exec.vector.complex.impl.UnionWriter;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+
+public class UnionSqlAccessor extends AbstractSqlAccessor {
+
+  FieldReader reader;
--- End diff --

JDBC functionality wasn't really a goal here, just the bare minimum so that results will display in sqlline.


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3963) Read raw key value bytes from sequence files

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3963?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983343#comment-14983343
 ] 

ASF GitHub Bot commented on DRILL-3963:
---

Github user sudheeshkatkam commented on the pull request:

https://github.com/apache/drill/pull/214#issuecomment-152654691
  
I am not able to build with this patch (`mvn clean install`); I am getting 
an error that does not happen on master. What am I missing?

This is in the jdbc-all module.
```
...
Running org.apache.drill.jdbc.ITTestShadedJar
java.class.path:

/Users/skatkam/Documents/drill/exec/jdbc-all/target/test-classes:/Users/skatkam/Documents/drill/exec/jdbc-all/target/classes:/Users/skatkam/.m2/repository/junit/junit/4.11/junit-4.11.jar:/Users/skatkam/.m2/repository/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar:/Users/skatkam/Documents/drill/exec/jdbc-all/target/test-classes/:
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further 
details.
Oct 30, 2015 10:13:39 AM 
oadd.io.netty.channel.AbstractChannelHandlerContext invokeExceptionCaught
WARNING: An exception was thrown by a user handler's exceptionCaught() 
method while handling the following exception:
java.lang.ExceptionInInitializerError
at 
oadd.org.apache.drill.exec.proto.UserBitShared$QueryId.internalGetFieldAccessorTable(UserBitShared.java:1317)
at 
oadd.com.google.protobuf.GeneratedMessage.getDescriptorForType(GeneratedMessage.java:98)
at 
oadd.com.google.protobuf.AbstractMessage.hashCode(AbstractMessage.java:190)
at 
java.util.concurrent.ConcurrentHashMap.hash(ConcurrentHashMap.java:333)
at 
java.util.concurrent.ConcurrentHashMap.putIfAbsent(ConcurrentHashMap.java:1145)
at 
oadd.org.apache.drill.exec.rpc.user.QueryResultHandler$SubmissionListener.success(QueryResultHandler.java:328)
at 
oadd.org.apache.drill.exec.rpc.user.QueryResultHandler$SubmissionListener.success(QueryResultHandler.java:278)
at 
oadd.org.apache.drill.exec.rpc.CoordinationQueue$RpcListener.set(CoordinationQueue.java:98)
at 
oadd.org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:262)
at 
oadd.org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:205)
at 
oadd.io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:89)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
oadd.io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:254)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
oadd.io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
oadd.io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:242)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
oadd.io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:339)
at 
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:324)
at 
oadd.io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:847)
at 
oadd.io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
at 
oadd.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
at 
oadd.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
at 
oadd.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
at oadd.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
at 
oadd.io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
at java.lang.Thread.run(Thread.java:745)
Caused by: 

[jira] [Commented] (DRILL-2288) missing JDBC metadata (schema) for 0-row results--ScanBatch violating IterOutcome protocol

2015-10-30 Thread Daniel Barclay (Drill) (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-2288?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983410#comment-14983410
 ] 

Daniel Barclay (Drill) commented on DRILL-2288:
---



Chain of bugs and problems encountered and (partially) addressed:

1.  {{ScanBatch.next()}} returned {{NONE}} without ever returning 
{{OK_NEW_SCHEMA}} for a source having zero rows (so downstream operators didn't 
get its schema, even for static-schema sources, or even get trigger to update 
their own schema).

2.  {{RecordBatch.IterOutcome}}, especially the allowed sequence of values, was 
not documented clearly (so developers didn't know correctly what to expect or 
provide).

3.  {{IteratorValidatorBatchIterator}} didn't validate the sequence of 
{{IterOutcome values}} (so developers weren't notified about incorrect results).

4.  {{UnionAllRecordBatch}} did not interpret {{NONE}} and {{OK_NEW_SCHEMA}} 
correctly (so it reported spurious/incorrect schema-change and/or 
empty-/non-empty input exceptions).

5.  {{ScanBatch.Mutator.isNewSchema()}} didn't handle a short-circuit OR ("{{||}}") correctly in calling {{SchemaChangeCallBack.getSchemaChange()}} (so it didn't reset nested schema-change state, and so caused spurious {{OK_NEW_SCHEMA}} notifications and downstream exceptions; see the sketch after this list).

6.  {{JsonRecordReader.ensureAtLeastOneField()}} didn't check whether any field 
already existed in the batch (so in that case it forcibly changed the type to 
{{NullableIntVector}}, causing schema changes and downstream exceptions). 
\[Note:  DRILL-2288 does not address other problems with {{NullableIntVector}} 
dummy columns from {{JsonRecordReader}}.]

7.  HBase tests used only one table region, ignoring known problems with 
multi-region HBase tables (so latent {{HBaseRecordReader}} problems were left 
undetected and unresolved.)   \[Note: DRILL-2288 addresses only one test table 
(increasing the number of regions on the other test tables exposes at least one 
other problem).]

8.  {{HBaseRecordReader}} didn't create a {{MapVector}} for every column family 
(so {{NullableIntVector}} dummy columns got created, causing spurious schema 
changes and downstream exceptions).

9.  Some {{RecordBatch}} classes didn't reset their record counts to zero ({{OrderedPartitionRecordBatch.recordCount}}, {{ProjectRecordBatch.recordCount}}, and/or {{TopNBatch.recordCount}}) (so downstream code tried to access elements of (correctly) empty vectors, yielding {{IndexOutOfBoundsException}} (with ~"... {{range (0, 0)}}") ).

10.  {{RecordBatchLoader}}'s record count was not reset to zero by {{UnorderedReceiverBatch}} (so, again, downstream code tried to access elements of (correctly) empty vectors, yielding {{IndexOutOfBoundsException}} (with ~"... {{range (0, 0)}}") ).

11.  {{MapVector.load(...)}} left some existing vectors empty, not matching the returned length and the length of sibling vectors (so {{MapVector.getObject(int)}} got {{IndexOutOfBoundsException}} (with ~"... {{range (0, 0)}}").  \[Note: DRILL-2288 does not address the root problem.]


> missing JDBC metadata (schema) for 0-row results--ScanBatch violating 
> IterOutcome protocol
> --
>
> Key: DRILL-2288
> URL: https://issues.apache.org/jira/browse/DRILL-2288
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Storage - Information Schema
>Reporter: Daniel Barclay (Drill)
>Assignee: Daniel Barclay (Drill)
> Fix For: 1.3.0
>
> Attachments: Drill2288NoResultSetMetadataWhenZeroRowsTest.java
>
>
> The ResultSetMetaData object from getMetadata() of a ResultSet is not set up 
> (getColumnCount() returns zero, and trying to access any other metadata 
> throws IndexOutOfBoundsException) for a result set with zero rows, at least 
> for one from DatabaseMetaData.getColumns(...).



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983293#comment-14983293
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43550818
  
--- Diff: 
exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java ---
@@ -26,12 +26,23 @@
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.physical.PlannerSettings;
+import org.junit.After;
 import org.junit.Ignore;
 import org.junit.Test;
 
 public class TestExampleQueries extends BaseTestQuery {
 //  private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestExampleQueries.class);
 
+  @After
--- End diff --

I've removed this code altogether. It's not needed.


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983613#comment-14983613
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43563231
  
--- Diff: 
exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
 ---
@@ -127,7 +133,7 @@ protected static boolean isComparableType(MajorType type) {
 
   private static String formatCanNotCompareMsg(MajorType left, MajorType right) {
     StringBuilder sb = new StringBuilder();
-    sb.append("Map, Array or repeated scalar type should not be used in group by, order by or in a comparison operator. Drill does not support compare between ");
+    sb.append("Map, Array, Union or repeated scalar type should not be used in group by, order by or in a comparison operator. Drill does not support compare between ");
--- End diff --

Yes, they could be comparable, but that functionality has not been 
implemented yet.


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983287#comment-14983287
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43550594
  
--- Diff: 
exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
 ---
@@ -275,83 +275,86 @@ private void consumeEntireNextValue() throws 
IOException {
   private void writeData(MapWriter map, FieldSelection selection, boolean moveForward) throws IOException {
     //
     map.start();
-    outside: while (true) {
-
-      JsonToken t;
-      if(moveForward){
-        t = parser.nextToken();
-      }else{
-        t = parser.getCurrentToken();
-        moveForward = true;
-      }
+    try {
--- End diff --

Yes, I am pretty sure that is the case. I don't remember why exactly I 
needed to put this in a try-finally block.
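The diff above only shows the opening of the try block. A plausible reason for the change (an assumption, since the author does not recall) is the usual try/finally idiom: guarantee that the writer is closed even when the reader throws mid-record. A generic sketch of that idiom, with hypothetical types rather than Drill's MapWriter:

```
// Generic sketch of the try/finally idiom; RecordWriterSketch is hypothetical,
// and the real JsonReader change may do more than this.
interface RecordWriterSketch {
  void start();
  void writeFields() throws java.io.IOException;  // may throw mid-record
  void end();
}

class WriteWithCleanup {
  static void writeData(RecordWriterSketch map) throws java.io.IOException {
    map.start();
    try {
      map.writeFields();
    } finally {
      map.end();   // always balances start(), so the writer is not left half-open on error
    }
  }
}
```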


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3994) Build Fails on Windows after DRILL-3742

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3994?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983624#comment-14983624
 ] 

ASF GitHub Bot commented on DRILL-3994:
---

GitHub user julienledem opened a pull request:

https://github.com/apache/drill/pull/226

DRILL-3994: make classpath scanning work on windows



You can merge this pull request into a Git repository by running:

$ git pull https://github.com/julienledem/drill DRILL-3994

Alternatively you can review and apply these changes as the patch at:

https://github.com/apache/drill/pull/226.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

This closes #226


commit ace0beb2f4904731a20fa5cf1234ac8ef8e55ac9
Author: Julien Le Dem 
Date:   2015-10-30T23:51:09Z

DRILL-3994: make classpath scanning work on windows




> Build Fails on Windows after DRILL-3742
> ---
>
> Key: DRILL-3994
> URL: https://issues.apache.org/jira/browse/DRILL-3994
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Tools, Build & Test
>Reporter: Sudheesh Katkam
>Assignee: Julien Le Dem
>Priority: Critical
> Fix For: 1.3.0
>
>
> Build fails on Windows on the latest master:
> {code}
> c:\drill> mvn clean install -DskipTests 
> ...
> [INFO] Rat check: Summary of files. Unapproved: 0 unknown: 0 generated: 0 
> approved: 169 licence.
> [INFO] 
> [INFO] <<< exec-maven-plugin:1.2.1:java (default) < validate @ drill-common 
> <<<
> [INFO] 
> [INFO] --- exec-maven-plugin:1.2.1:java (default) @ drill-common ---
> SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
> SLF4J: Defaulting to no-operation (NOP) logger implementation
> SLF4J: See 
> http://www.slf4j.org/codes.html#StaticLoggerBinder
>  for further details.
> Scanning: C:\drill\common\target\classes
> [WARNING] 
> java.lang.reflect.InvocationTargetException
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:297)
>   at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.IllegalArgumentException: 
> file:C:/drill/common/target/classes/ not in 
> [file:/C:/drill/common/target/classes/]
>   at 
> org.apache.drill.common.scanner.BuildTimeScan.main(BuildTimeScan.java:129)
>   ... 6 more
> [INFO] 
> 
> [INFO] Reactor Summary:
> [INFO] 
> [INFO] Apache Drill Root POM .. SUCCESS [ 10.016 
> s]
> [INFO] tools/Parent Pom ... SUCCESS [  1.062 
> s]
> [INFO] tools/freemarker codegen tooling ... SUCCESS [  6.922 
> s]
> [INFO] Drill Protocol . SUCCESS [ 10.062 
> s]
> [INFO] Common (Logical Plan, Base expressions)  FAILURE [  9.954 
> s]
> [INFO] contrib/Parent Pom . SKIPPED
> [INFO] contrib/data/Parent Pom  SKIPPED
> [INFO] contrib/data/tpch-sample-data .. SKIPPED
> [INFO] exec/Parent Pom  SKIPPED
> [INFO] exec/Java Execution Engine . SKIPPED
> [INFO] exec/JDBC Driver using dependencies  SKIPPED
> [INFO] JDBC JAR with all dependencies . SKIPPED
> [INFO] contrib/mongo-storage-plugin ... SKIPPED
> [INFO] contrib/hbase-storage-plugin ... SKIPPED
> [INFO] contrib/jdbc-storage-plugin  SKIPPED
> [INFO] contrib/hive-storage-plugin/Parent Pom . SKIPPED
> [INFO] contrib/hive-storage-plugin/hive-exec-shaded ... SKIPPED
> [INFO] contrib/hive-storage-plugin/core ... SKIPPED
> [INFO] contrib/drill-gis-plugin ... SKIPPED
> [INFO] Packaging and Distribution Assembly  SKIPPED
> [INFO] contrib/sqlline  SKIPPED
> [INFO] 
> 
> [INFO] BUILD FAILURE
> [INFO] 
> 
> [INFO] Total time: 38.813 s
> [INFO] Finished at: 2015-10-28T12:17:19-07:00
> [INFO] Final Memory: 67M/466M
> [INFO] 
> 
> [ERROR] Failed to execute goal org.codehaus.mojo:exec-maven-plugin:1.2.1:java 
> (default) on project 

[jira] [Commented] (DRILL-3994) Build Fails on Windows after DRILL-3742

2015-10-30 Thread Julien Le Dem (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3994?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983626#comment-14983626
 ] 

Julien Le Dem commented on DRILL-3994:
--

I have not tested it on Windows, but here is a potential fix:
https://github.com/apache/drill/pull/226

> Build Fails on Windows after DRILL-3742
> ---
>
> Key: DRILL-3994
> URL: https://issues.apache.org/jira/browse/DRILL-3994
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Tools, Build & Test
>Reporter: Sudheesh Katkam
>Assignee: Julien Le Dem
>Priority: Critical
> Fix For: 1.3.0
>
>
> Build fails on Windows on the latest master:
> {code}
> c:\drill> mvn clean install -DskipTests 
> ...
> [INFO] Rat check: Summary of files. Unapproved: 0 unknown: 0 generated: 0 
> approved: 169 licence.
> [INFO] 
> [INFO] <<< exec-maven-plugin:1.2.1:java (default) < validate @ drill-common 
> <<<
> [INFO] 
> [INFO] --- exec-maven-plugin:1.2.1:java (default) @ drill-common ---
> SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
> SLF4J: Defaulting to no-operation (NOP) logger implementation
> SLF4J: See 
> http://www.slf4j.org/codes.html#StaticLoggerBinder
>  for further details.
> Scanning: C:\drill\common\target\classes
> [WARNING] 
> java.lang.reflect.InvocationTargetException
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:297)
>   at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.IllegalArgumentException: 
> file:C:/drill/common/target/classes/ not in 
> [file:/C:/drill/common/target/classes/]
>   at 
> org.apache.drill.common.scanner.BuildTimeScan.main(BuildTimeScan.java:129)
>   ... 6 more
> [INFO] 
> 
> [INFO] Reactor Summary:
> [INFO] 
> [INFO] Apache Drill Root POM .. SUCCESS [ 10.016 
> s]
> [INFO] tools/Parent Pom ... SUCCESS [  1.062 
> s]
> [INFO] tools/freemarker codegen tooling ... SUCCESS [  6.922 
> s]
> [INFO] Drill Protocol . SUCCESS [ 10.062 
> s]
> [INFO] Common (Logical Plan, Base expressions)  FAILURE [  9.954 
> s]
> [INFO] contrib/Parent Pom . SKIPPED
> [INFO] contrib/data/Parent Pom  SKIPPED
> [INFO] contrib/data/tpch-sample-data .. SKIPPED
> [INFO] exec/Parent Pom  SKIPPED
> [INFO] exec/Java Execution Engine . SKIPPED
> [INFO] exec/JDBC Driver using dependencies  SKIPPED
> [INFO] JDBC JAR with all dependencies . SKIPPED
> [INFO] contrib/mongo-storage-plugin ... SKIPPED
> [INFO] contrib/hbase-storage-plugin ... SKIPPED
> [INFO] contrib/jdbc-storage-plugin  SKIPPED
> [INFO] contrib/hive-storage-plugin/Parent Pom . SKIPPED
> [INFO] contrib/hive-storage-plugin/hive-exec-shaded ... SKIPPED
> [INFO] contrib/hive-storage-plugin/core ... SKIPPED
> [INFO] contrib/drill-gis-plugin ... SKIPPED
> [INFO] Packaging and Distribution Assembly  SKIPPED
> [INFO] contrib/sqlline  SKIPPED
> [INFO] 
> 
> [INFO] BUILD FAILURE
> [INFO] 
> 
> [INFO] Total time: 38.813 s
> [INFO] Finished at: 2015-10-28T12:17:19-07:00
> [INFO] Final Memory: 67M/466M
> [INFO] 
> 
> [ERROR] Failed to execute goal org.codehaus.mojo:exec-maven-plugin:1.2.1:java 
> (default) on project drill-common: An exception occured while executing the 
> Java class. null: InvocationTargetException: 
> file:C:/drill/common/target/classes/ not in 
> [file:/C:/drill/common/target/classes/] -> [Help 1]
> [ERROR] 
> [ERROR] To see the full stack trace of the errors, re-run Maven with the -e 
> switch.
> [ERROR] Re-run Maven using the -X switch to enable full debug logging.
> [ERROR] 
> [ERROR] For more information about the errors and possible solutions, please 
> read the following articles:
> [ERROR] [Help 1] 
> http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
> [ERROR] 
> [ERROR] After correcting the problems, you can resume 

[jira] [Created] (DRILL-3998) Check skipping of .clear and .release in BaseTestQuery.printResult(...) (bug?)

2015-10-30 Thread Daniel Barclay (Drill) (JIRA)
Daniel Barclay (Drill) created DRILL-3998:
-

 Summary: Check skipping of .clear and .release in 
BaseTestQuery.printResult(...) (bug?)
 Key: DRILL-3998
 URL: https://issues.apache.org/jira/browse/DRILL-3998
 Project: Apache Drill
  Issue Type: Bug
  Components: Tools, Build & Test
Reporter: Daniel Barclay (Drill)


In {{BaseTestQuery.printResult(...)}}, if a loaded record batch has no records, 
the code skips not only the printout method but also the 
{{RecordBatchLoader.clear()}} and {{QueryDataBatch.release()}} calls.  Is that 
correct?

(At some point while debugging DRILL-2288, that skipping of {{clear}} and 
{{release}} appeared to cause a reported memory leak.)
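For reference, a minimal, self-contained sketch of the safer pattern (the names below are hypothetical stand-ins, not the actual test classes): printing may legitimately be skipped for empty batches, but the clear/release step is not.
{code}
import java.util.Arrays;

// Hypothetical sketch only; Batch stands in for the RecordBatchLoader/QueryDataBatch pair.
public class PrintResultSketch {

  interface Batch {
    int recordCount();
    void print();
    void close();              // stands in for loader.clear() + batch.release()
  }

  static void printAll(Iterable<Batch> batches) {
    for (Batch b : batches) {
      try {
        if (b.recordCount() > 0) {
          b.print();           // printing may be skipped for empty batches...
        }
      } finally {
        b.close();             // ...but the release must not be
      }
    }
  }

  public static void main(String[] args) {
    Batch empty = new Batch() {
      public int recordCount() { return 0; }
      public void print() { System.out.println("rows"); }
      public void close() { System.out.println("released empty batch"); }
    };
    printAll(Arrays.asList(empty));   // prints "released empty batch" only
  }
}
{code}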





--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3747) UDF for "fuzzy" string and similarity matching

2015-10-30 Thread Karol Potocki (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3747?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982753#comment-14982753
 ] 

Karol Potocki commented on DRILL-3747:
--

Such functionality is often needed when searching data produced by user 
collaboration (e.g. street names in internet data sources) or when building 
search conditions from user input (handling spelling mistakes).
I recently needed a solution like that; a basic implementation is on my GitHub:
https://github.com/k255/drill-fuzzy-search
It is built on the Simmetrics library, which recently moved to the Apache license.

> UDF for "fuzzy" string and similarity matching
> --
>
> Key: DRILL-3747
> URL: https://issues.apache.org/jira/browse/DRILL-3747
> Project: Apache Drill
>  Issue Type: New Feature
>  Components: Functions - Drill
>Affects Versions: Future
>Reporter: Edmon Begoli
>Priority: Minor
>  Labels: features
> Fix For: Future
>
>   Original Estimate: 672h
>  Remaining Estimate: 672h
>
> I propose implementation of string/distance or distance matching functions 
> similar to what one finds in most of other databases - soundex, metaphone, 
> levenshtein (and more advanced variants such as levenshtein-damerau, 
> jaro-winkler, etc.).
> See fuzzystrmatch 
> http://www.postgresql.org/docs/9.5/static/fuzzystrmatch.html, 
> and pg_similarity http://pgsimilarity.projects.pgfoundry.org/
> for inspiration.



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3747) UDF for "fuzzy" string and similarity matching

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3747?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983103#comment-14983103
 ] 

ASF GitHub Bot commented on DRILL-3747:
---

GitHub user k255 opened a pull request:

https://github.com/apache/drill/pull/224

DRILL-3747: basic similarity search with simmetric

Helps handle e.g. typos in search queries with popular algorithms like 
Levenshtein.
Sample query:
```
select levenshtein('foo', 'boo') from (VALUES(1)); //gives 0.67
```
and
```
select levenshtein('foo', 'bar') from (VALUES(1)); //not similar - gives 0
```
More:
https://github.com/k255/drill-fuzzy-search
https://en.wikipedia.org/wiki/Levenshtein_distance
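For reference, a plain-Java sketch of the normalized Levenshtein similarity (1 - distance / max length) that the sample output above suggests; the UDF itself delegates to the Simmetrics library rather than to this code.
```
public class LevenshteinSimilarity {

  // Classic two-row Wagner-Fischer edit-distance computation.
  static int distance(String a, String b) {
    int[] prev = new int[b.length() + 1];
    int[] curr = new int[b.length() + 1];
    for (int j = 0; j <= b.length(); j++) {
      prev[j] = j;
    }
    for (int i = 1; i <= a.length(); i++) {
      curr[0] = i;
      for (int j = 1; j <= b.length(); j++) {
        int cost = a.charAt(i - 1) == b.charAt(j - 1) ? 0 : 1;
        curr[j] = Math.min(Math.min(curr[j - 1] + 1, prev[j] + 1), prev[j - 1] + cost);
      }
      int[] tmp = prev;
      prev = curr;
      curr = tmp;
    }
    return prev[b.length()];
  }

  // Normalize the distance into a [0, 1] similarity score.
  static double similarity(String a, String b) {
    int max = Math.max(a.length(), b.length());
    return max == 0 ? 1.0 : 1.0 - (double) distance(a, b) / max;
  }

  public static void main(String[] args) {
    System.out.printf("%.2f%n", similarity("foo", "boo"));  // 0.67, as in the sample query
    System.out.printf("%.2f%n", similarity("foo", "bar"));  // 0.00, not similar
  }
}
```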

You can merge this pull request into a Git repository by running:

$ git pull https://github.com/k255/drill drill-fuzzysearch

Alternatively you can review and apply these changes as the patch at:

https://github.com/apache/drill/pull/224.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

This closes #224


commit 51248358adf7ee71a744cccb7a22b45850f192a8
Author: potocki 
Date:   2015-10-30T18:54:41Z

basic similarity search with simmetric




> UDF for "fuzzy" string and similarity matching
> --
>
> Key: DRILL-3747
> URL: https://issues.apache.org/jira/browse/DRILL-3747
> Project: Apache Drill
>  Issue Type: New Feature
>  Components: Functions - Drill
>Affects Versions: Future
>Reporter: Edmon Begoli
>Priority: Minor
>  Labels: features
> Fix For: Future
>
>   Original Estimate: 672h
>  Remaining Estimate: 672h
>
> I propose implementation of string/distance or distance matching functions 
> similar to what one finds in most of other databases - soundex, metaphone, 
> levenshtein (and more advanced variants such as levenshtein-damerau, 
> jaro-winkler, etc.).
> See fuzzystrmatch 
> http://www.postgresql.org/docs/9.5/static/fuzzystrmatch.html, 
> and pg_similarity http://pgsimilarity.projects.pgfoundry.org/
> for inspiration.



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Updated] (DRILL-3634) Hive Scan : Add fileCount (no of files scanned) or no of partitions scanned to the text plan

2015-10-30 Thread Rahul Challapalli (JIRA)

 [ 
https://issues.apache.org/jira/browse/DRILL-3634?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Rahul Challapalli updated DRILL-3634:
-
Description: 
The Hive scan portion of the text plan only lists the files scanned. It would 
be helpful if the text plan also included a fileCount value or the number of 
partitions scanned.

Reason: as part of our tests we currently verify plans with a regex-based 
check, and the expected regex matches more than it should. Fixing that might be 
hard, so having fileCount/partitionCount in the plan would make the plan 
comparison more accurate.

  was:
The Hive scan portion of the text plan only lists the files scanned. It would 
be helpful if the text plan also included a fileCount value.

Reason: as part of our tests we currently verify plans with a regex-based 
check, and the expected regex matches more than it should. Fixing that might be 
hard, so having fileCount in the plan would make the plan comparison more 
accurate.


> Hive Scan : Add fileCount (no of files scanned) or no of partitions scanned 
> to the text plan
> 
>
> Key: DRILL-3634
> URL: https://issues.apache.org/jira/browse/DRILL-3634
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Storage - Hive
>Affects Versions: 1.2.0
>Reporter: Rahul Challapalli
> Fix For: Future
>
>
> The Hive scan portion of the text plan only lists the files scanned. It would 
> be helpful if the text plan also included a fileCount value or the number of 
> partitions scanned.
> Reason: as part of our tests we currently verify plans with a regex-based 
> check, and the expected regex matches more than it should. Fixing that might 
> be hard, so having fileCount/partitionCount in the plan would make the plan 
> comparison more accurate.
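As an aside, a small sketch of why a count in the plan text tightens regex-based verification; the plan fragment and attribute names below are made up for the example and are not Drill's actual text-plan output.
{code}
import java.util.regex.Pattern;

public class PlanCheck {
  public static void main(String[] args) {
    // Made-up plan fragment; attribute names are illustrative only.
    String plan = "HiveScan [table=orders, numFiles=3, numPartitions=2, columns=[`*`]]";

    // Without a count in the plan, the expectation can only anchor on the scan itself,
    // so it would also match a plan that scanned far more files than intended.
    Pattern loose = Pattern.compile("HiveScan \\[table=orders.*\\]");

    // With fileCount/partitionCount present, the expected regex can pin the exact counts.
    Pattern exact = Pattern.compile("HiveScan \\[table=orders, numFiles=3, numPartitions=2.*\\]");

    System.out.println(loose.matcher(plan).find());   // true
    System.out.println(exact.matcher(plan).find());   // true here, false if the counts change
  }
}
{code}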



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Updated] (DRILL-3994) Build Fails on Windows after DRILL-3742

2015-10-30 Thread Sudheesh Katkam (JIRA)

 [ 
https://issues.apache.org/jira/browse/DRILL-3994?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Sudheesh Katkam updated DRILL-3994:
---
Fix Version/s: 1.3.0

> Build Fails on Windows after DRILL-3742
> ---
>
> Key: DRILL-3994
> URL: https://issues.apache.org/jira/browse/DRILL-3994
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Tools, Build & Test
>Reporter: Sudheesh Katkam
>Assignee: Julien Le Dem
>Priority: Critical
> Fix For: 1.3.0
>
>
> Build fails on Windows on the latest master:
> {code}
> c:\drill> mvn clean install -DskipTests 
> ...
> [INFO] Rat check: Summary of files. Unapproved: 0 unknown: 0 generated: 0 
> approved: 169 licence.
> [INFO] 
> [INFO] <<< exec-maven-plugin:1.2.1:java (default) < validate @ drill-common 
> <<<
> [INFO] 
> [INFO] --- exec-maven-plugin:1.2.1:java (default) @ drill-common ---
> SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
> SLF4J: Defaulting to no-operation (NOP) logger implementation
> SLF4J: See 
> http://www.slf4j.org/codes.html#StaticLoggerBinder
>  for further details.
> Scanning: C:\drill\common\target\classes
> [WARNING] 
> java.lang.reflect.InvocationTargetException
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:297)
>   at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.IllegalArgumentException: 
> file:C:/drill/common/target/classes/ not in 
> [file:/C:/drill/common/target/classes/]
>   at 
> org.apache.drill.common.scanner.BuildTimeScan.main(BuildTimeScan.java:129)
>   ... 6 more
> [INFO] 
> 
> [INFO] Reactor Summary:
> [INFO] 
> [INFO] Apache Drill Root POM .. SUCCESS [ 10.016 
> s]
> [INFO] tools/Parent Pom ... SUCCESS [  1.062 
> s]
> [INFO] tools/freemarker codegen tooling ... SUCCESS [  6.922 
> s]
> [INFO] Drill Protocol . SUCCESS [ 10.062 
> s]
> [INFO] Common (Logical Plan, Base expressions)  FAILURE [  9.954 
> s]
> [INFO] contrib/Parent Pom . SKIPPED
> [INFO] contrib/data/Parent Pom  SKIPPED
> [INFO] contrib/data/tpch-sample-data .. SKIPPED
> [INFO] exec/Parent Pom  SKIPPED
> [INFO] exec/Java Execution Engine . SKIPPED
> [INFO] exec/JDBC Driver using dependencies  SKIPPED
> [INFO] JDBC JAR with all dependencies . SKIPPED
> [INFO] contrib/mongo-storage-plugin ... SKIPPED
> [INFO] contrib/hbase-storage-plugin ... SKIPPED
> [INFO] contrib/jdbc-storage-plugin  SKIPPED
> [INFO] contrib/hive-storage-plugin/Parent Pom . SKIPPED
> [INFO] contrib/hive-storage-plugin/hive-exec-shaded ... SKIPPED
> [INFO] contrib/hive-storage-plugin/core ... SKIPPED
> [INFO] contrib/drill-gis-plugin ... SKIPPED
> [INFO] Packaging and Distribution Assembly  SKIPPED
> [INFO] contrib/sqlline  SKIPPED
> [INFO] 
> 
> [INFO] BUILD FAILURE
> [INFO] 
> 
> [INFO] Total time: 38.813 s
> [INFO] Finished at: 2015-10-28T12:17:19-07:00
> [INFO] Final Memory: 67M/466M
> [INFO] 
> 
> [ERROR] Failed to execute goal org.codehaus.mojo:exec-maven-plugin:1.2.1:java 
> (default) on project drill-common: An exception occured while executing the 
> Java class. null: InvocationTargetException: 
> file:C:/drill/common/target/classes/ not in 
> [file:/C:/drill/common/target/classes/] -> [Help 1]
> [ERROR] 
> [ERROR] To see the full stack trace of the errors, re-run Maven with the -e 
> switch.
> [ERROR] Re-run Maven using the -X switch to enable full debug logging.
> [ERROR] 
> [ERROR] For more information about the errors and possible solutions, please 
> read the following articles:
> [ERROR] [Help 1] 
> http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
> [ERROR] 
> [ERROR] After correcting the problems, you can resume the build with the 
> command
> [ERROR]   mvn  -rf :drill-common
> {code}



--
This message was sent by 

[jira] [Commented] (DRILL-3871) Exception on inner join when join predicate is int96 field generated by impala

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3871?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982887#comment-14982887
 ] 

ASF GitHub Bot commented on DRILL-3871:
---

Github user jaltekruse commented on the pull request:

https://github.com/apache/drill/pull/219#issuecomment-152588218
  
+1, thanks for cleaning this up Parth!


> Exception on inner join when join predicate is int96 field generated by impala
> --
>
> Key: DRILL-3871
> URL: https://issues.apache.org/jira/browse/DRILL-3871
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Execution - Data Types
>Affects Versions: 1.2.0
>Reporter: Victoria Markman
>Assignee: Deneche A. Hakim
>Priority: Critical
>  Labels: int96
> Fix For: 1.3.0
>
> Attachments: tables.tar
>
>
> Both tables in the join were created by Impala, with column c_timestamp 
> being parquet int96. 
> {code}
> 0: jdbc:drill:schema=dfs> select
> . . . . . . . . . . . . > max(t1.c_timestamp),
> . . . . . . . . . . . . > min(t1.c_timestamp),
> . . . . . . . . . . . . > count(t1.c_timestamp)
> . . . . . . . . . . . . > from
> . . . . . . . . . . . . > imp_t1 t1
> . . . . . . . . . . . . > inner join
> . . . . . . . . . . . . > imp_t2 t2
> . . . . . . . . . . . . > on  (t1.c_timestamp = t2.c_timestamp)
> . . . . . . . . . . . . > ;
> java.lang.RuntimeException: java.sql.SQLException: SYSTEM ERROR: 
> TProtocolException: Required field 'uncompressed_page_size' was not found in 
> serialized data! Struct: PageHeader(type:null, uncompressed_page_size:0, 
> compressed_page_size:0)
> Fragment 0:0
> [Error Id: eb6a5df8-fc59-409b-957a-59cb1079b5b8 on atsqa4-133.qa.lab:31010]
> at sqlline.IncrementalRows.hasNext(IncrementalRows.java:73)
> at 
> sqlline.TableOutputFormat$ResizingRowsProvider.next(TableOutputFormat.java:87)
> at sqlline.TableOutputFormat.print(TableOutputFormat.java:118)
> at sqlline.SqlLine.print(SqlLine.java:1583)
> at sqlline.Commands.execute(Commands.java:852)
> at sqlline.Commands.sql(Commands.java:751)
> at sqlline.SqlLine.dispatch(SqlLine.java:738)
> at sqlline.SqlLine.begin(SqlLine.java:612)
> at sqlline.SqlLine.start(SqlLine.java:366)
> at sqlline.SqlLine.main(SqlLine.java:259)
> {code}
> drillbit.log
> {code}
> 2015-09-30 21:15:45,710 [29f3aefe-3209-a6e6-0418-500dac60a339:foreman] INFO  
> o.a.d.exec.store.parquet.Metadata - Took 0 ms to get file statuses
> 2015-09-30 21:15:45,712 [29f3aefe-3209-a6e6-0418-500dac60a339:foreman] INFO  
> o.a.d.exec.store.parquet.Metadata - Fetch parquet metadata: Executed 1 out of 
> 1 using 1 threads. Time: 1ms total, 1.645381ms avg, 1ms max.
> 2015-09-30 21:15:45,712 [29f3aefe-3209-a6e6-0418-500dac60a339:foreman] INFO  
> o.a.d.exec.store.parquet.Metadata - Fetch parquet metadata: Executed 1 out of 
> 1 using 1 threads. Earliest start: 1.332000 μs, Latest start: 1.332000 μs, 
> Average start: 1.332000 μs .
> 2015-09-30 21:15:45,830 [29f3aefe-3209-a6e6-0418-500dac60a339:frag:0:0] INFO  
> o.a.d.e.w.fragment.FragmentExecutor - 
> 29f3aefe-3209-a6e6-0418-500dac60a339:0:0: State change requested 
> AWAITING_ALLOCATION --> RUNNING
> 2015-09-30 21:15:45,830 [29f3aefe-3209-a6e6-0418-500dac60a339:frag:0:0] INFO  
> o.a.d.e.w.f.FragmentStatusReporter - 
> 29f3aefe-3209-a6e6-0418-500dac60a339:0:0: State to report: RUNNING
> 2015-09-30 21:15:45,925 [29f3aefe-3209-a6e6-0418-500dac60a339:frag:0:0] INFO  
> o.a.d.e.w.fragment.FragmentExecutor - 
> 29f3aefe-3209-a6e6-0418-500dac60a339:0:0: State change requested RUNNING --> 
> FAILED
> 2015-09-30 21:15:45,930 [29f3aefe-3209-a6e6-0418-500dac60a339:frag:0:0] INFO  
> o.a.d.e.w.fragment.FragmentExecutor - 
> 29f3aefe-3209-a6e6-0418-500dac60a339:0:0: State change requested FAILED --> 
> FINISHED
> 2015-09-30 21:15:45,931 [29f3aefe-3209-a6e6-0418-500dac60a339:frag:0:0] ERROR 
> o.a.d.e.w.fragment.FragmentExecutor - SYSTEM ERROR: TProtocolException: 
> Required field 'uncompressed_page_size' was not found in serialized data! 
> Struct: PageHeader(type:null, uncompressed_page_size:0, 
> compressed_page_size:0)
> Fragment 0:0
> [Error Id: eb6a5df8-fc59-409b-957a-59cb1079b5b8 on atsqa4-133.qa.lab:31010]
> org.apache.drill.common.exceptions.UserException: SYSTEM ERROR: 
> TProtocolException: Required field 'uncompressed_page_size' was not found in 
> serialized data! Struct: PageHeader(type:null, uncompressed_page_size:0, 
> compressed_page_size:0)
> Fragment 0:0
> [Error Id: eb6a5df8-fc59-409b-957a-59cb1079b5b8 on atsqa4-133.qa.lab:31010]
> at 
> org.apache.drill.common.exceptions.UserException$Builder.build(UserException.java:534)
>  

[jira] [Updated] (DRILL-3634) Hive Scan : Add fileCount (no of files scanned) or no of partitions scanned to the text plan

2015-10-30 Thread Rahul Challapalli (JIRA)

 [ 
https://issues.apache.org/jira/browse/DRILL-3634?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Rahul Challapalli updated DRILL-3634:
-
Summary: Hive Scan : Add fileCount (no of files scanned) or no of 
partitions scanned to the text plan  (was: Hive Scan : Add fileCount (no of 
files scanned) to the text plan)

> Hive Scan : Add fileCount (no of files scanned) or no of partitions scanned 
> to the text plan
> 
>
> Key: DRILL-3634
> URL: https://issues.apache.org/jira/browse/DRILL-3634
> Project: Apache Drill
>  Issue Type: Bug
>  Components: Storage - Hive
>Affects Versions: 1.2.0
>Reporter: Rahul Challapalli
> Fix For: Future
>
>
> The Hive scan portion of the text plan only lists the files scanned. It would 
> be helpful if the text plan also included a fileCount value.
> Reason: as part of our tests we currently verify plans with a regex-based 
> check, and the expected regex matches more than it should. Fixing that might 
> be hard, so having fileCount in the plan would make the plan comparison more 
> accurate.



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Comment Edited] (DRILL-3747) UDF for "fuzzy" string and similarity matching

2015-10-30 Thread Karol Potocki (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3747?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14982753#comment-14982753
 ] 

Karol Potocki edited comment on DRILL-3747 at 10/30/15 6:24 PM:


Such functionality is often needed when searching data produced by user 
collaboration (e.g. street names in internet data sources) or when building 
search conditions from user input (handling typos).
I recently needed a solution like that; a basic implementation is on my GitHub:
https://github.com/k255/drill-fuzzy-search
It is built on the Simmetrics library, which recently moved to the Apache license.


was (Author: k255):
Such functionality is often needed when searching data produced by user 
collaboration (e.g. street names in internet data sources) or when building 
search conditions from user input (handling spelling mistakes).
I recently needed a solution like that; a basic implementation is on my GitHub:
https://github.com/k255/drill-fuzzy-search
It is built on the Simmetrics library, which recently moved to the Apache license.

> UDF for "fuzzy" string and similarity matching
> --
>
> Key: DRILL-3747
> URL: https://issues.apache.org/jira/browse/DRILL-3747
> Project: Apache Drill
>  Issue Type: New Feature
>  Components: Functions - Drill
>Affects Versions: Future
>Reporter: Edmon Begoli
>Priority: Minor
>  Labels: features
> Fix For: Future
>
>   Original Estimate: 672h
>  Remaining Estimate: 672h
>
> I propose implementation of string/distance or distance matching functions 
> similar to what one finds in most of other databases - soundex, metaphone, 
> levenshtein (and more advanced variants such as levenshtein-damerau, 
> jaro-winkler, etc.).
> See fuzzystrmatch 
> http://www.postgresql.org/docs/9.5/static/fuzzystrmatch.html, 
> and pg_similarity http://pgsimilarity.projects.pgfoundry.org/
> for inspiration.



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983207#comment-14983207
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43546226
  
--- Diff: exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java ---
@@ -173,7 +173,7 @@ public ValueVector getChildByOrdinal(int id) {
*
* Note that this method does not enforce any vector type check nor throws a schema change exception.
*/
-  protected void putChild(String name, ValueVector vector) {
+  public void putChild(String name, ValueVector vector) {
 putVector(name, vector);
--- End diff --

I'm not sure what it means for what is loaded to be right. But I went ahead 
and moved the UnionVector into the same package as AbstractMapVector, so I can 
use this method without making it public.
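For context, a small illustration of the visibility trade-off (the class and package names below are simplified stand-ins, not the real vector classes): a package-private method remains reachable from a sibling class in the same package without being widened to public.
```
// Simplified stand-ins for MapVector / UnionVector; illustrative only.
package org.apache.drill.exec.vector.complex;

class MapVectorSketch {
  void putChild(String name, Object vector) {   // package-private, not public
    // register the child vector under the given name
  }
}

class UnionVectorSketch {
  private final MapVectorSketch internalMap = new MapVectorSketch();

  void addVector(Object vector) {
    // Legal without widening putChild to public, because both classes
    // live in the same package.
    internalMap.putChild("types", vector);
  }
}
```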


> Modify existing vectors to allow type promotion
> ---
>
> Key: DRILL-3232
> URL: https://issues.apache.org/jira/browse/DRILL-3232
> Project: Apache Drill
>  Issue Type: Sub-task
>  Components: Execution - Codegen, Execution - Data Types, Execution - 
> Relational Operators, Functions - Drill
>Reporter: Steven Phillips
>Assignee: Hanifi Gunes
> Fix For: 1.3.0
>
>
> Support the ability for existing vectors to be promoted similar to supported 
> implicit casting rules.
> For example:
> INT > DOUBLE > STRING > EMBEDDED
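As an illustration of the precedence named above, a tiny sketch follows; the type names and their ordering are taken from the example in this issue, and this is not a Drill API.
{code}
import java.util.Arrays;
import java.util.List;

public class PromotionOrder {
  // Precedence list from the issue's example: each type promotes to the ones after it.
  static final List<String> ORDER = Arrays.asList("INT", "DOUBLE", "STRING", "EMBEDDED");

  // The wider of the two types wins, by position in the precedence list.
  static String promote(String a, String b) {
    return ORDER.indexOf(a) >= ORDER.indexOf(b) ? a : b;
  }

  public static void main(String[] args) {
    System.out.println(promote("INT", "DOUBLE"));   // DOUBLE
    System.out.println(promote("STRING", "INT"));   // STRING
  }
}
{code}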



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)


[jira] [Commented] (DRILL-3232) Modify existing vectors to allow type promotion

2015-10-30 Thread ASF GitHub Bot (JIRA)

[ 
https://issues.apache.org/jira/browse/DRILL-3232?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel=14983212#comment-14983212
 ] 

ASF GitHub Bot commented on DRILL-3232:
---

Github user StevenMPhillips commented on a diff in the pull request:

https://github.com/apache/drill/pull/207#discussion_r43546612
  
--- Diff: exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/ListVector.java ---
@@ -0,0 +1,390 @@

+/*******************************************************************************
+
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.vector.complex;
+
+import com.google.common.collect.ObjectArrays;
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.types.TypeProtos.DataMode;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.proto.UserBitShared;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.record.TypedFieldId;
+import org.apache.drill.exec.util.CallBack;
+import org.apache.drill.exec.util.JsonStringArrayList;
+import org.apache.drill.exec.vector.AddOrGetResult;
+import org.apache.drill.exec.vector.BaseValueVector;
+import org.apache.drill.exec.vector.UInt1Vector;
+import org.apache.drill.exec.vector.UInt4Vector;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.VectorDescriptor;
+import org.apache.drill.exec.vector.ZeroVector;
+import org.apache.drill.exec.vector.complex.impl.ComplexCopier;
+import org.apache.drill.exec.vector.complex.impl.UnionListReader;
+import org.apache.drill.exec.vector.complex.impl.UnionListWriter;
+import org.apache.drill.exec.vector.complex.impl.UnionVector;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.apache.drill.exec.vector.complex.writer.FieldWriter;
+
+import java.util.List;
+
+public class ListVector extends BaseRepeatedValueVector {
+
+  private UInt4Vector offsets;
+  private final UInt1Vector bits;
+  private Mutator mutator = new Mutator();
+  private Accessor accessor = new Accessor();
+  private UnionListWriter writer;
+  private UnionListReader reader;
+  private CallBack callBack;
+
+  public ListVector(MaterializedField field, BufferAllocator allocator, CallBack callBack) {
+    super(field, allocator);
+    this.bits = new UInt1Vector(MaterializedField.create("$bits$", Types.required(MinorType.UINT1)), allocator);
+    offsets = getOffsetVector();
+    this.field.addChild(getDataVector().getField());
+    this.writer = new UnionListWriter(this);
+    this.reader = new UnionListReader(this);
+    this.callBack = callBack;
+  }
+
+  public UnionListWriter getWriter() {
+    return writer;
+  }
+
+  @Override
+  public void allocateNew() throws OutOfMemoryRuntimeException {
+    super.allocateNewSafe();
+  }
+
+  public void transferTo(ListVector target) {
+    offsets.makeTransferPair(target.offsets).transfer();
+    bits.makeTransferPair(target.bits).transfer();
+    if (target.getDataVector() instanceof ZeroVector) {
+      target.addOrGetVector(new VectorDescriptor(vector.getField().getType()));
+    }
+    getDataVector().makeTransferPair(target.getDataVector()).transfer();
+  }
+
+  public void copyFromSafe(int inIndex, int outIndex, ListVector from) {
+    copyFrom(inIndex, outIndex, from);
+  }
+
+  public void copyFrom(int inIndex, int outIndex,