[ 
https://issues.apache.org/jira/browse/DRILL-7589?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17039072#comment-17039072
 ] 

ASF GitHub Bot commented on DRILL-7589:
---------------------------------------

vvysotskyi commented on pull request #1987: DRILL-7589: Set temporary tests 
folder for UDF_DIRECTORY_LOCAL, fix allocators closing in BloomFilterTest, fix 
permissions issue for TestGracefulShutdown tests
URL: https://github.com/apache/drill/pull/1987#discussion_r380623146
 
 

 ##########
 File path: 
exec/java-exec/src/test/java/org/apache/drill/exec/work/filter/BloomFilterTest.java
 ##########
 @@ -133,214 +135,227 @@ public boolean hasFailed() {
     }
   }
 
-
   @Test
   public void testNotExist() throws Exception {
-    Drillbit bit = new Drillbit(c, RemoteServiceSet.getLocalServiceSet(), 
ClassPathScanner.fromPrescan(c));
-    bit.run();
-    DrillbitContext bitContext = bit.getContext();
-    FunctionImplementationRegistry registry = 
bitContext.getFunctionImplementationRegistry();
-    FragmentContextImpl context = new FragmentContextImpl(bitContext, 
BitControl.PlanFragment.getDefaultInstance(), null, registry);
-    BufferAllocator bufferAllocator = bitContext.getAllocator();
-    //create RecordBatch
-    VarCharVector vector = new VarCharVector(SchemaBuilder.columnSchema("a", 
TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.REQUIRED), bufferAllocator);
-    vector.allocateNew();
-    int valueCount = 3;
-    VarCharVector.Mutator mutator = vector.getMutator();
-    mutator.setSafe(0, "a".getBytes());
-    mutator.setSafe(1, "b".getBytes());
-    mutator.setSafe(2, "c".getBytes());
-    mutator.setValueCount(valueCount);
-    VectorContainer vectorContainer = new VectorContainer();
-    TypedFieldId fieldId = vectorContainer.add(vector);
-    RecordBatch recordBatch = new TestRecordBatch(vectorContainer);
-    //construct hash64
-    ValueVectorReadExpression exp = new ValueVectorReadExpression(fieldId);
-    LogicalExpression[] expressions = new LogicalExpression[1];
-    expressions[0] = exp;
-    TypedFieldId[] fieldIds = new TypedFieldId[1];
-    fieldIds[0] = fieldId;
-    ValueVectorHashHelper valueVectorHashHelper = new 
ValueVectorHashHelper(recordBatch, context);
-    ValueVectorHashHelper.Hash64 hash64 = 
valueVectorHashHelper.getHash64(expressions, fieldIds);
-
-    //construct BloomFilter
-    int numBytes = BloomFilter.optimalNumOfBytes(3, 0.03);
-
-    BloomFilter bloomFilter = new BloomFilter(numBytes, bufferAllocator);
-    for (int i = 0; i < valueCount; i++) {
-      long hashCode = hash64.hash64Code(i, 0, 0);
-      bloomFilter.insert(hashCode);
+    int userPort = QueryTestUtil.getFreePortNumber(31170, 300);
+    int bitPort = QueryTestUtil.getFreePortNumber(31180, 300);
+    ClusterFixtureBuilder clusterFixtureBuilder = 
ClusterFixture.bareBuilder(dirTestWatcher)
+        .configProperty(ExecConstants.INITIAL_USER_PORT, userPort)
+        .configProperty(ExecConstants.INITIAL_BIT_PORT, bitPort)
+        .configProperty(ExecConstants.ALLOW_LOOPBACK_ADDRESS_BINDING, true);
+    try (ClusterFixture cluster = clusterFixtureBuilder.build()) {
+      Drillbit bit = cluster.drillbit();
+      DrillbitContext bitContext = bit.getContext();
+      FunctionImplementationRegistry registry = 
bitContext.getFunctionImplementationRegistry();
+      FragmentContextImpl context = new FragmentContextImpl(bitContext, 
BitControl.PlanFragment.getDefaultInstance(), null, registry);
+      BufferAllocator bufferAllocator = bitContext.getAllocator();
+      //create RecordBatch
+      VarCharVector vector = new VarCharVector(SchemaBuilder.columnSchema("a", 
TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.REQUIRED), bufferAllocator);
+      vector.allocateNew();
+      int valueCount = 3;
+      VarCharVector.Mutator mutator = vector.getMutator();
+      mutator.setSafe(0, "a".getBytes());
+      mutator.setSafe(1, "b".getBytes());
+      mutator.setSafe(2, "c".getBytes());
+      mutator.setValueCount(valueCount);
+      VectorContainer vectorContainer = new VectorContainer();
+      TypedFieldId fieldId = vectorContainer.add(vector);
+      RecordBatch recordBatch = new TestRecordBatch(vectorContainer);
+      //construct hash64
+      ValueVectorReadExpression exp = new ValueVectorReadExpression(fieldId);
+      LogicalExpression[] expressions = new LogicalExpression[1];
+      expressions[0] = exp;
+      TypedFieldId[] fieldIds = new TypedFieldId[1];
+      fieldIds[0] = fieldId;
+      ValueVectorHashHelper valueVectorHashHelper = new 
ValueVectorHashHelper(recordBatch, context);
+      ValueVectorHashHelper.Hash64 hash64 = 
valueVectorHashHelper.getHash64(expressions, fieldIds);
+
+      //construct BloomFilter
+      int numBytes = BloomFilter.optimalNumOfBytes(3, 0.03);
+
+      BloomFilter bloomFilter = new BloomFilter(numBytes, bufferAllocator);
+      for (int i = 0; i < valueCount; i++) {
+        long hashCode = hash64.hash64Code(i, 0, 0);
+        bloomFilter.insert(hashCode);
+      }
+
+      //-----------------create probe side RecordBatch---------------------
+      VarCharVector probeVector = new 
VarCharVector(SchemaBuilder.columnSchema("a", TypeProtos.MinorType.VARCHAR, 
TypeProtos.DataMode.REQUIRED), bufferAllocator);
+      probeVector.allocateNew();
+      int probeValueCount = 1;
+      VarCharVector.Mutator mutator1 = probeVector.getMutator();
+      mutator1.setSafe(0, "f".getBytes());
+      mutator1.setValueCount(probeValueCount);
+      VectorContainer probeVectorContainer = new VectorContainer();
+      TypedFieldId probeFieldId = probeVectorContainer.add(probeVector);
+      RecordBatch probeRecordBatch = new TestRecordBatch(probeVectorContainer);
+      ValueVectorReadExpression probExp = new 
ValueVectorReadExpression(probeFieldId);
+      LogicalExpression[] probExpressions = new LogicalExpression[1];
+      probExpressions[0] = probExp;
+      TypedFieldId[] probeFieldIds = new TypedFieldId[1];
+      probeFieldIds[0] = probeFieldId;
+      ValueVectorHashHelper probeValueVectorHashHelper = new 
ValueVectorHashHelper(probeRecordBatch, context);
+      ValueVectorHashHelper.Hash64 probeHash64 = 
probeValueVectorHashHelper.getHash64(probExpressions, probeFieldIds);
+      long hashCode = probeHash64.hash64Code(0, 0, 0);
+      boolean contain = bloomFilter.find(hashCode);
+      Assert.assertFalse(contain);
+      bloomFilter.getContent().close();
+      vectorContainer.clear();
+      probeVectorContainer.clear();
+      context.close();
     }
-
-    //-----------------create probe side RecordBatch---------------------
-    VarCharVector probeVector = new 
VarCharVector(SchemaBuilder.columnSchema("a", TypeProtos.MinorType.VARCHAR, 
TypeProtos.DataMode.REQUIRED), bufferAllocator);
-    probeVector.allocateNew();
-    int probeValueCount = 1;
-    VarCharVector.Mutator mutator1 = probeVector.getMutator();
-    mutator1.setSafe(0, "f".getBytes());
-    mutator1.setValueCount(probeValueCount);
-    VectorContainer probeVectorContainer = new VectorContainer();
-    TypedFieldId probeFieldId = probeVectorContainer.add(probeVector);
-    RecordBatch probeRecordBatch = new TestRecordBatch(probeVectorContainer);
-    ValueVectorReadExpression probExp = new 
ValueVectorReadExpression(probeFieldId);
-    LogicalExpression[] probExpressions = new LogicalExpression[1];
-    probExpressions[0] = probExp;
-    TypedFieldId[] probeFieldIds = new TypedFieldId[1];
-    probeFieldIds[0] = probeFieldId;
-    ValueVectorHashHelper probeValueVectorHashHelper = new 
ValueVectorHashHelper(probeRecordBatch, context);
-    ValueVectorHashHelper.Hash64 probeHash64 = 
probeValueVectorHashHelper.getHash64(probExpressions, probeFieldIds);
-    long hashCode = probeHash64.hash64Code(0, 0, 0);
-    boolean contain = bloomFilter.find(hashCode);
-    Assert.assertFalse(contain);
-    bloomFilter.getContent().close();
-    vectorContainer.clear();
-    probeVectorContainer.clear();
-    context.close();
-    bitContext.close();
-    bit.close();
   }
 
 
   @Test
   public void testExist() throws Exception {
-
-    Drillbit bit = new Drillbit(c, RemoteServiceSet.getLocalServiceSet(), 
ClassPathScanner.fromPrescan(c));
-    bit.run();
-    DrillbitContext bitContext = bit.getContext();
-    FunctionImplementationRegistry registry = 
bitContext.getFunctionImplementationRegistry();
-    FragmentContextImpl context = new FragmentContextImpl(bitContext, 
BitControl.PlanFragment.getDefaultInstance(), null, registry);
-    BufferAllocator bufferAllocator = bitContext.getAllocator();
-    //create RecordBatch
-    VarCharVector vector = new VarCharVector(SchemaBuilder.columnSchema("a", 
TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.REQUIRED), bufferAllocator);
-    vector.allocateNew();
-    int valueCount = 3;
-    VarCharVector.Mutator mutator = vector.getMutator();
-    mutator.setSafe(0, "a".getBytes());
-    mutator.setSafe(1, "b".getBytes());
-    mutator.setSafe(2, "c".getBytes());
-    mutator.setValueCount(valueCount);
-    VectorContainer vectorContainer = new VectorContainer();
-    TypedFieldId fieldId = vectorContainer.add(vector);
-    RecordBatch recordBatch = new TestRecordBatch(vectorContainer);
-    //construct hash64
-    ValueVectorReadExpression exp = new ValueVectorReadExpression(fieldId);
-    LogicalExpression[] expressions = new LogicalExpression[1];
-    expressions[0] = exp;
-    TypedFieldId[] fieldIds = new TypedFieldId[1];
-    fieldIds[0] = fieldId;
-    ValueVectorHashHelper valueVectorHashHelper = new 
ValueVectorHashHelper(recordBatch, context);
-    ValueVectorHashHelper.Hash64 hash64 = 
valueVectorHashHelper.getHash64(expressions, fieldIds);
-
-    //construct BloomFilter
-    int numBytes = BloomFilter.optimalNumOfBytes(3, 0.03);
-
-    BloomFilter bloomFilter = new BloomFilter(numBytes, bufferAllocator);
-    for (int i = 0; i < valueCount; i++) {
-      long hashCode = hash64.hash64Code(i, 0, 0);
-      bloomFilter.insert(hashCode);
+    int userPort = QueryTestUtil.getFreePortNumber(31170, 300);
+    int bitPort = QueryTestUtil.getFreePortNumber(31180, 300);
+    ClusterFixtureBuilder clusterFixtureBuilder = 
ClusterFixture.bareBuilder(dirTestWatcher)
+        .configProperty(ExecConstants.INITIAL_USER_PORT, userPort)
+        .configProperty(ExecConstants.INITIAL_BIT_PORT, bitPort)
+        .configProperty(ExecConstants.ALLOW_LOOPBACK_ADDRESS_BINDING, true);
+    try (ClusterFixture cluster = clusterFixtureBuilder.build()) {
+      Drillbit bit = cluster.drillbit();
+      DrillbitContext bitContext = bit.getContext();
+      FunctionImplementationRegistry registry = 
bitContext.getFunctionImplementationRegistry();
+      FragmentContextImpl context = new FragmentContextImpl(bitContext, 
BitControl.PlanFragment.getDefaultInstance(), null, registry);
+      BufferAllocator bufferAllocator = bitContext.getAllocator();
+      //create RecordBatch
+      VarCharVector vector = new VarCharVector(SchemaBuilder.columnSchema("a", 
TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.REQUIRED), bufferAllocator);
+      vector.allocateNew();
+      int valueCount = 3;
+      VarCharVector.Mutator mutator = vector.getMutator();
+      mutator.setSafe(0, "a".getBytes());
+      mutator.setSafe(1, "b".getBytes());
+      mutator.setSafe(2, "c".getBytes());
+      mutator.setValueCount(valueCount);
+      VectorContainer vectorContainer = new VectorContainer();
+      TypedFieldId fieldId = vectorContainer.add(vector);
+      RecordBatch recordBatch = new TestRecordBatch(vectorContainer);
+      //construct hash64
+      ValueVectorReadExpression exp = new ValueVectorReadExpression(fieldId);
+      LogicalExpression[] expressions = new LogicalExpression[1];
+      expressions[0] = exp;
+      TypedFieldId[] fieldIds = new TypedFieldId[1];
+      fieldIds[0] = fieldId;
+      ValueVectorHashHelper valueVectorHashHelper = new 
ValueVectorHashHelper(recordBatch, context);
+      ValueVectorHashHelper.Hash64 hash64 = 
valueVectorHashHelper.getHash64(expressions, fieldIds);
+
+      //construct BloomFilter
+      int numBytes = BloomFilter.optimalNumOfBytes(3, 0.03);
+
+      BloomFilter bloomFilter = new BloomFilter(numBytes, bufferAllocator);
+      for (int i = 0; i < valueCount; i++) {
+        long hashCode = hash64.hash64Code(i, 0, 0);
+        bloomFilter.insert(hashCode);
+      }
+
+      //-----------------create probe side RecordBatch---------------------
+      VarCharVector probeVector = new 
VarCharVector(SchemaBuilder.columnSchema("a", TypeProtos.MinorType.VARCHAR, 
TypeProtos.DataMode.REQUIRED), bufferAllocator);
 
 Review comment:
   Rewritten tests to use row sets and moved out common code.
 
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


> TestDynamicUDFSupport fails on GitHub Actions
> ---------------------------------------------
>
>                 Key: DRILL-7589
>                 URL: https://issues.apache.org/jira/browse/DRILL-7589
>             Project: Apache Drill
>          Issue Type: Bug
>    Affects Versions: 1.18.0
>            Reporter: Vova Vysotskyi
>            Assignee: Vova Vysotskyi
>            Priority: Major
>             Fix For: 1.18.0
>
>
> {{TestDynamicUDFSupport}} tests fail intermittently when running in a GitHub 
> Actions job, regardless of the JDK version: a test sometimes passes for a 
> specific JDK, but sometimes fails for that same JDK.
> Also, different tests from the same test class may fail on different runs.
> With logging enabled for the tests, the following stack traces are logged:
> {noformat}
> 2020-02-15T10:56:33.8624913Z 10:56:33.855 
> [21b8319e-7e24-a9b9-34b7-74e1d27f64e8:foreman] ERROR 
> o.a.d.e.e.f.FunctionImplementationRegistry - Problem during remote functions 
> load from drill-custom-abs.jar
> 2020-02-15T10:56:33.8626171Z java.io.IOException: Error during jar 
> [drill-custom-abs-sources.jar] coping from 
> [/home/runner/work/drill/drill/exec/java-exec/target/org.apache.drill.exec.udf.dynamic.TestDynamicUDFSupport/udf/drill/udf/registry]
>  to [/tmp/drill/udf/udf/local/]
> 2020-02-15T10:56:33.8626499Z  at 
> org.apache.drill.exec.expr.fn.FunctionImplementationRegistry.copyJarToLocal(FunctionImplementationRegistry.java:573)
> 2020-02-15T10:56:33.8626758Z  at 
> org.apache.drill.exec.expr.fn.FunctionImplementationRegistry.syncWithRemoteRegistry(FunctionImplementationRegistry.java:369)
> 2020-02-15T10:56:33.8627312Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.convertPlan(DrillSqlWorker.java:135)
> 2020-02-15T10:56:33.8627544Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:93)
> 2020-02-15T10:56:33.8628086Z  at 
> org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:590)
> 2020-02-15T10:56:33.8628315Z  at 
> org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:275)
> 2020-02-15T10:56:33.8628522Z  at 
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> 2020-02-15T10:56:33.8628749Z  at 
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> 2020-02-15T10:56:33.8628961Z  at 
> java.base/java.lang.Thread.run(Thread.java:834)
> 2020-02-15T10:56:33.8629569Z Caused by: 
> org.apache.hadoop.util.Shell$ExitCodeException: chmod: cannot access 
> '/tmp/drill/udf/udf/local/.drill-custom-abs-sources.jar.crc': No such file or 
> directory
> 2020-02-15T10:56:33.8629777Z 
> 2020-02-15T10:56:33.8629975Z  at 
> org.apache.hadoop.util.Shell.runCommand(Shell.java:1008)
> 2020-02-15T10:56:33.8630183Z  at 
> org.apache.hadoop.util.Shell.run(Shell.java:901)
> 2020-02-15T10:56:33.8630396Z  at 
> org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:1213)
> 2020-02-15T10:56:33.8630618Z  at 
> org.apache.hadoop.util.Shell.execCommand(Shell.java:1307)
> 2020-02-15T10:56:33.8630813Z  at 
> org.apache.hadoop.util.Shell.execCommand(Shell.java:1289)
> 2020-02-15T10:56:33.8631031Z  at 
> org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:865)
> 2020-02-15T10:56:33.8631283Z  at 
> org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:252)
> 2020-02-15T10:56:33.8631519Z  at 
> org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:232)
> 2020-02-15T10:56:33.8631876Z  at 
> org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:331)
> 2020-02-15T10:56:33.8632094Z  at 
> org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:320)
> 2020-02-15T10:56:33.8632306Z  at 
> org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:351)
> 2020-02-15T10:56:33.8632528Z  at 
> org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:405)
> 2020-02-15T10:56:33.8632748Z  at 
> org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:464)
> 2020-02-15T10:56:33.8632961Z  at 
> org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:443)
> 2020-02-15T10:56:33.8633171Z  at 
> org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1118)
> 2020-02-15T10:56:33.8633380Z  at 
> org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1098)
> 2020-02-15T10:56:33.8633580Z  at 
> org.apache.hadoop.fs.FileSystem.create(FileSystem.java:987)
> 2020-02-15T10:56:33.8633780Z  at 
> org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:414)
> 2020-02-15T10:56:33.8633986Z  at 
> org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:387)
> 2020-02-15T10:56:33.8634187Z  at 
> org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:337)
> 2020-02-15T10:56:33.8634398Z  at 
> org.apache.hadoop.fs.LocalFileSystem.copyToLocalFile(LocalFileSystem.java:88)
> 2020-02-15T10:56:33.8634613Z  at 
> org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:2379)
> 2020-02-15T10:56:33.8634845Z  at 
> org.apache.drill.exec.expr.fn.FunctionImplementationRegistry.copyJarToLocal(FunctionImplementationRegistry.java:569)
> 2020-02-15T10:56:33.8635063Z  ... 8 common frames omitted
> 2020-02-15T10:56:34.8987578Z 10:56:34.893 [Listener at localhost/35941] ERROR 
> org.apache.drill.TestReporter - Test Failed (d: 0 B(1.8 MiB), h: -9.1 
> MiB(593.5 MiB), nh: 4.3 MiB(408.2 MiB)): 
> testOverloadedFunctionPlanningStage(org.apache.drill.exec.udf.dynamic.TestDynamicUDFSupport)
> 2020-02-15T10:56:34.8988146Z org.apache.drill.exec.rpc.RpcException: 
> org.apache.drill.common.exceptions.UserRemoteException: FUNCTION ERROR: ABS 
> does not support operand types (CHAR,CHAR)
> 2020-02-15T10:56:34.8988378Z 
> 2020-02-15T10:56:34.8988535Z 
> 2020-02-15T10:56:34.8988965Z [Error Id: a9457be6-7fb3-4687-9c97-3b26eb524a9e ]
> 2020-02-15T10:56:34.8989194Z  at 
> org.apache.drill.exec.rpc.RpcException.mapException(RpcException.java:60)
> 2020-02-15T10:56:34.8989425Z  at 
> org.apache.drill.exec.client.DrillClient$ListHoldingResultsListener.getResults(DrillClient.java:881)
> 2020-02-15T10:56:34.8989940Z  at 
> org.apache.drill.exec.client.DrillClient.runQuery(DrillClient.java:583)
> 2020-02-15T10:56:34.8990182Z  at 
> org.apache.drill.test.BaseTestQuery.testRunAndReturn(BaseTestQuery.java:343)
> 2020-02-15T10:56:34.8990395Z  at 
> org.apache.drill.test.BaseTestQuery$ClassicTestServices.testRunAndReturn(BaseTestQuery.java:277)
> 2020-02-15T10:56:34.8990623Z  at 
> org.apache.drill.test.DrillTestWrapper.testRunAndReturn(DrillTestWrapper.java:938)
> 2020-02-15T10:56:34.8990856Z  at 
> org.apache.drill.test.DrillTestWrapper.compareUnorderedResults(DrillTestWrapper.java:533)
> 2020-02-15T10:56:34.8991072Z  at 
> org.apache.drill.test.DrillTestWrapper.run(DrillTestWrapper.java:172)
> 2020-02-15T10:56:34.8991304Z  at 
> org.apache.drill.test.TestBuilder.go(TestBuilder.java:145)
> 2020-02-15T10:56:34.8991535Z  at 
> org.apache.drill.exec.udf.dynamic.TestDynamicUDFSupport.testOverloadedFunctionPlanningStage(TestDynamicUDFSupport.java:528)
> 2020-02-15T10:56:34.8991757Z  at 
> java.base/java.lang.Thread.run(Thread.java:834)
> 2020-02-15T10:56:34.8991988Z Caused by: 
> org.apache.drill.common.exceptions.UserRemoteException: FUNCTION ERROR: ABS 
> does not support operand types (CHAR,CHAR)
> 2020-02-15T10:56:34.8992147Z 
> 2020-02-15T10:56:34.8992289Z 
> 2020-02-15T10:56:34.8992714Z [Error Id: a9457be6-7fb3-4687-9c97-3b26eb524a9e ]
> 2020-02-15T10:56:34.8992946Z  at 
> org.apache.drill.exec.rpc.user.QueryResultHandler.resultArrived(QueryResultHandler.java:125)
> 2020-02-15T10:56:34.8993159Z  at 
> org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:422)
> 2020-02-15T10:56:34.8993475Z  at 
> org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:96)
> 2020-02-15T10:56:34.8993677Z  at 
> org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:273)
> 2020-02-15T10:56:34.8993894Z  at 
> org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:243)
> 2020-02-15T10:56:34.8994118Z  at 
> io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:88)
> 2020-02-15T10:56:34.8994356Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
> 2020-02-15T10:56:34.8994590Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
> 2020-02-15T10:56:34.8994820Z  at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
> 2020-02-15T10:56:34.8995046Z  at 
> io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
> 2020-02-15T10:56:34.8995276Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
> 2020-02-15T10:56:34.8995504Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
> 2020-02-15T10:56:34.8995730Z  at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
> 2020-02-15T10:56:34.8995959Z  at 
> io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
> 2020-02-15T10:56:34.8996194Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
> 2020-02-15T10:56:34.8996420Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
> 2020-02-15T10:56:34.8996646Z  at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
> 2020-02-15T10:56:34.8996853Z  at 
> io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:312)
> 2020-02-15T10:56:34.8997081Z  at 
> io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:286)
> 2020-02-15T10:56:34.8997306Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
> 2020-02-15T10:56:34.8997527Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
> 2020-02-15T10:56:34.8997755Z  at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
> 2020-02-15T10:56:34.8998047Z  at 
> io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86)
> 2020-02-15T10:56:34.8998303Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
> 2020-02-15T10:56:34.9000135Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
> 2020-02-15T10:56:34.9006440Z  at 
> io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
> 2020-02-15T10:56:34.9012516Z  at 
> io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
> 2020-02-15T10:56:34.9018949Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
> 2020-02-15T10:56:34.9026481Z  at 
> io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
> 2020-02-15T10:56:34.9032092Z  at 
> io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
> 2020-02-15T10:56:34.9038070Z  at 
> io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
> 2020-02-15T10:56:34.9043048Z  at 
> io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:645)
> 2020-02-15T10:56:34.9048461Z  at 
> io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580)
> 2020-02-15T10:56:34.9053546Z  at 
> io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497)
> 2020-02-15T10:56:34.9057905Z  at 
> io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459)
> 2020-02-15T10:56:34.9063735Z  at 
> io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131)
> 2020-02-15T10:56:34.9066312Z  ... 1 common frames omitted
> 2020-02-15T10:56:34.9072014Z Caused by: java.lang.Exception: FUNCTION ERROR: 
> ABS does not support operand types (CHAR,CHAR)
> 2020-02-15T10:56:34.9072730Z 
> 2020-02-15T10:56:34.9073400Z 
> 2020-02-15T10:56:34.9077321Z [Error Id: a9457be6-7fb3-4687-9c97-3b26eb524a9e ]
> 2020-02-15T10:56:34.9082862Z  at 
> org.apache.drill.common.exceptions.UserException$Builder.build(UserException.java:653)
> 2020-02-15T10:56:34.9089264Z  at 
> org.apache.drill.exec.planner.sql.TypeInferenceUtils.resolveDrillFuncHolder(TypeInferenceUtils.java:915)
> 2020-02-15T10:56:34.9095494Z  at 
> org.apache.drill.exec.planner.sql.TypeInferenceUtils.access$2200(TypeInferenceUtils.java:61)
> 2020-02-15T10:56:34.9103325Z  at 
> org.apache.drill.exec.planner.sql.TypeInferenceUtils$DrillDefaultSqlReturnTypeInference.inferReturnType(TypeInferenceUtils.java:351)
> 2020-02-15T10:56:34.9108469Z  at 
> org.apache.calcite.sql.SqlOperator.inferReturnType(SqlOperator.java:470)
> 2020-02-15T10:56:34.9113503Z  at 
> org.apache.calcite.sql.SqlOperator.validateOperands(SqlOperator.java:437)
> 2020-02-15T10:56:34.9118635Z  at 
> org.apache.calcite.sql.SqlFunction.deriveType(SqlFunction.java:314)
> 2020-02-15T10:56:34.9122708Z  at 
> org.apache.calcite.sql.SqlFunction.deriveType(SqlFunction.java:218)
> 2020-02-15T10:56:34.9129054Z  at 
> org.apache.drill.exec.planner.sql.DrillCalciteSqlFunctionWrapper.deriveType(DrillCalciteSqlFunctionWrapper.java:136)
> 2020-02-15T10:56:34.9135035Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl$DeriveTypeVisitor.visit(SqlValidatorImpl.java:5640)
> 2020-02-15T10:56:34.9140573Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl$DeriveTypeVisitor.visit(SqlValidatorImpl.java:5627)
> 2020-02-15T10:56:34.9143978Z  at 
> org.apache.calcite.sql.SqlCall.accept(SqlCall.java:139)
> 2020-02-15T10:56:34.9149720Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.deriveTypeImpl(SqlValidatorImpl.java:1692)
> 2020-02-15T10:56:34.9154964Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.deriveType(SqlValidatorImpl.java:1677)
> 2020-02-15T10:56:34.9159402Z  at 
> org.apache.calcite.sql.SqlAsOperator.deriveType(SqlAsOperator.java:133)
> 2020-02-15T10:56:34.9168373Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl$DeriveTypeVisitor.visit(SqlValidatorImpl.java:5640)
> 2020-02-15T10:56:34.9169071Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl$DeriveTypeVisitor.visit(SqlValidatorImpl.java:5627)
> 2020-02-15T10:56:34.9169236Z  at 
> org.apache.calcite.sql.SqlCall.accept(SqlCall.java:139)
> 2020-02-15T10:56:34.9169376Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.deriveTypeImpl(SqlValidatorImpl.java:1692)
> 2020-02-15T10:56:34.9173087Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.deriveType(SqlValidatorImpl.java:1677)
> 2020-02-15T10:56:34.9178523Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.expandSelectItem(SqlValidatorImpl.java:480)
> 2020-02-15T10:56:34.9184532Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.validateSelectList(SqlValidatorImpl.java:4117)
> 2020-02-15T10:56:34.9190012Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.validateSelect(SqlValidatorImpl.java:3396)
> 2020-02-15T10:56:34.9195435Z  at 
> org.apache.calcite.sql.validate.SelectNamespace.validateImpl(SelectNamespace.java:60)
> 2020-02-15T10:56:34.9200325Z  at 
> org.apache.calcite.sql.validate.AbstractNamespace.validate(AbstractNamespace.java:84)
> 2020-02-15T10:56:34.9206518Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.validateNamespace(SqlValidatorImpl.java:1009)
> 2020-02-15T10:56:34.9211094Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.validateQuery(SqlValidatorImpl.java:969)
> 2020-02-15T10:56:34.9214805Z  at 
> org.apache.calcite.sql.SqlSelect.validate(SqlSelect.java:216)
> 2020-02-15T10:56:34.9220461Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.validateScopedExpression(SqlValidatorImpl.java:944)
> 2020-02-15T10:56:34.9225856Z  at 
> org.apache.calcite.sql.validate.SqlValidatorImpl.validate(SqlValidatorImpl.java:651)
> 2020-02-15T10:56:34.9230932Z  at 
> org.apache.drill.exec.planner.sql.conversion.SqlConverter.validate(SqlConverter.java:189)
> 2020-02-15T10:56:34.9236877Z  at 
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.validateNode(DefaultSqlHandler.java:648)
> 2020-02-15T10:56:34.9242560Z  at 
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.validateAndConvert(DefaultSqlHandler.java:196)
> 2020-02-15T10:56:34.9247987Z  at 
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:170)
> 2020-02-15T10:56:34.9252877Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:283)
> 2020-02-15T10:56:34.9258181Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.getPhysicalPlan(DrillSqlWorker.java:163)
> 2020-02-15T10:56:34.9263230Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.convertPlan(DrillSqlWorker.java:140)
> 2020-02-15T10:56:34.9268395Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:93)
> 2020-02-15T10:56:34.9272404Z  at 
> org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:590)
> 2020-02-15T10:56:34.9276385Z  at 
> org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:275)
> 2020-02-15T10:56:34.9277449Z  at .......(:0)
> 2020-02-15T10:56:37.4255276Z 10:56:37.422 
> [21b83199-9e04-d50c-3ed5-7a42ccec4583:foreman] ERROR 
> o.a.d.e.p.s.h.DropFunctionHandler - Error during UDF unregistration
> 2020-02-15T10:56:37.4255628Z 
> org.apache.drill.common.exceptions.DrillRuntimeException: Failed to update 
> remote function registry. Exceeded retry attempts limit.
> 2020-02-15T10:56:37.4255962Z  at 
> org.apache.drill.exec.planner.sql.handlers.DropFunctionHandler.unregister(DropFunctionHandler.java:149)
> 2020-02-15T10:56:37.4256215Z  at 
> org.apache.drill.exec.planner.sql.handlers.DropFunctionHandler.getPlan(DropFunctionHandler.java:87)
> 2020-02-15T10:56:37.4268781Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:283)
> 2020-02-15T10:56:37.4269190Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.getPhysicalPlan(DrillSqlWorker.java:163)
> 2020-02-15T10:56:37.4269418Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.convertPlan(DrillSqlWorker.java:128)
> 2020-02-15T10:56:37.4269638Z  at 
> org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:93)
> 2020-02-15T10:56:37.4269855Z  at 
> org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:590)
> 2020-02-15T10:56:37.4270061Z  at 
> org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:275)
> 2020-02-15T10:56:37.4270698Z  at 
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> 2020-02-15T10:56:37.4270977Z  at 
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> 2020-02-15T10:56:37.4271187Z  at 
> java.base/java.lang.Thread.run(Thread.java:834)
> {noformat}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to