This is an automated email from the ASF dual-hosted git repository.

sorabh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git


The following commit(s) were added to refs/heads/master by this push:
     new 4fd9cba  DRILL-6624: Fix loss of the table row type when the same schema name was specified as single path and as a complex path in the same query
4fd9cba is described below

commit 4fd9cba1b3cc3b086a8f7cb1e25d7464f0db07b9
Author: Volodymyr Vysotskyi <[email protected]>
AuthorDate: Sat Jul 21 03:22:28 2018 +0300

    DRILL-6624: Fix loss of the table row type when the same schema name was specified as single path and as a complex path in the same query
    
    closes #1390
---
 .../org/apache/calcite/jdbc/DynamicRootSchema.java | 32 ++++++++++++----------
 .../drill/exec/physical/impl/TestSchema.java       | 29 ++++++++++++++++++++
 2 files changed, 46 insertions(+), 15 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
index e6b8f49..5fecfdd 100644
--- a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
@@ -17,7 +17,6 @@
  */
 package org.apache.calcite.jdbc;
 
-import com.google.common.collect.Lists;
 import org.apache.calcite.DataContext;
 
 import org.apache.calcite.linq4j.tree.Expression;
@@ -33,6 +32,7 @@ import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.SubSchemaWrapper;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -70,16 +70,16 @@ public class DynamicRootSchema extends DynamicSchema {
   }
 
   /**
-   * load schema factory(storage plugin) for schemaName
-   * @param schemaName
-   * @param caseSensitive
+   * Loads schema factory(storage plugin) for specified {@code schemaName}
+   * @param schemaName the name of the schema
+   * @param caseSensitive whether matching for the schema name is case sensitive
    */
   public void loadSchemaFactory(String schemaName, boolean caseSensitive) {
     try {
-      SchemaPlus thisPlus = this.plus();
+      SchemaPlus schemaPlus = this.plus();
       StoragePlugin plugin = getSchemaFactories().getPlugin(schemaName);
       if (plugin != null) {
-        plugin.registerSchemas(schemaConfig, thisPlus);
+        plugin.registerSchemas(schemaConfig, schemaPlus);
         return;
       }
 
@@ -91,15 +91,17 @@ public class DynamicRootSchema extends DynamicSchema {
           return;
         }
 
-        // Found the storage plugin for first part(e.g. 'dfs') of schemaName (e.g. 'dfs.tmp')
-        // register schema for this storage plugin to 'this'.
-        plugin.registerSchemas(schemaConfig, thisPlus);
-
+        // Looking for the SchemaPlus for the top level (e.g. 'dfs') of schemaName (e.g. 'dfs.tmp')
+        SchemaPlus firstLevelSchema = schemaPlus.getSubSchema(paths.get(0));
+        if (firstLevelSchema == null) {
+          // register schema for this storage plugin to 'this'.
+          plugin.registerSchemas(schemaConfig, schemaPlus);
+          firstLevelSchema = schemaPlus.getSubSchema(paths.get(0));
+        }
         // Load second level schemas for this storage plugin
-        final SchemaPlus firstlevelSchema = thisPlus.getSubSchema(paths.get(0));
-        final List<SchemaPlus> secondLevelSchemas = Lists.newArrayList();
-        for (String secondLevelSchemaName : firstlevelSchema.getSubSchemaNames()) {
-          secondLevelSchemas.add(firstlevelSchema.getSubSchema(secondLevelSchemaName));
+        List<SchemaPlus> secondLevelSchemas = new ArrayList<>();
+        for (String secondLevelSchemaName : firstLevelSchema.getSubSchemaNames()) {
+          secondLevelSchemas.add(firstLevelSchema.getSubSchema(secondLevelSchemaName));
         }
 
         for (SchemaPlus schema : secondLevelSchemas) {
@@ -110,7 +112,7 @@ public class DynamicRootSchema extends DynamicSchema {
            throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
           }
           SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
-          thisPlus.add(wrapper.getName(), wrapper);
+          schemaPlus.add(wrapper.getName(), wrapper);
         }
       }
     } catch(ExecutionSetupException | IOException ex) {
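
The change above makes loadSchemaFactory() look up the first-level sub-schema
(e.g. 'dfs') before registering it: plugin.registerSchemas() now runs only when
that sub-schema is missing, so a schema already registered for a single-path
reference is reused instead of being re-registered when the same name later
appears in a complex path such as 'dfs.tmp'. The snippet below is a minimal
standalone sketch of that look-up-then-register-if-missing pattern, not Drill
or Calcite code; the class RegisterOnceSketch, the map subSchemas and the
method loadFirstLevel are hypothetical names used only for illustration.

    import java.util.HashMap;
    import java.util.Map;

    public class RegisterOnceSketch {

      // Hypothetical stand-in for the root SchemaPlus: schema name -> sub-schema.
      private final Map<String, Object> subSchemas = new HashMap<>();

      // Hypothetical stand-in for plugin.registerSchemas(schemaConfig, schemaPlus):
      // each call creates a fresh sub-schema object for the given name.
      private void registerSchemas(String name) {
        subSchemas.put(name, new Object());
      }

      // Mirrors the null check added to DynamicRootSchema.loadSchemaFactory():
      // register only when the first-level schema is not present yet, so an
      // earlier registration for the same name is reused rather than replaced.
      public Object loadFirstLevel(String name) {
        Object firstLevel = subSchemas.get(name);
        if (firstLevel == null) {
          registerSchemas(name);
          firstLevel = subSchemas.get(name);
        }
        return firstLevel;
      }

      public static void main(String[] args) {
        RegisterOnceSketch sketch = new RegisterOnceSketch();
        Object first = sketch.loadFirstLevel("dfs");
        Object second = sketch.loadFirstLevel("dfs");
        // The second lookup reuses the object created by the first call.
        System.out.println(first == second); // prints: true
      }
    }
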
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSchema.java
index 9282eed..986925f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestSchema.java
@@ -84,4 +84,33 @@ public class TestSchema extends DrillTest {
     }
   }
 
+  @Test
+  public void testLocal() throws Exception {
+    try {
+      client.queryBuilder()
+          .sql("create table dfs.tmp.t1 as select 1 as c1")
+          .run();
+
+      client.queryBuilder()
+          .sql("create table dfs.tmp.t2 as select 1 as c1")
+          .run();
+
+      client.testBuilder()
+          .sqlQuery("select a.c1 from dfs.tmp.`t1` a\n" +
+            "join `dfs.tmp`.`t2` b ON b.c1 = a.c1\n")
+          .unOrdered()
+          .baselineColumns("c1")
+          .baselineValues(1)
+          .go();
+    } finally {
+      client.queryBuilder()
+          .sql("drop table if exists `dfs.tmp`.t1")
+          .run();
+
+      client.queryBuilder()
+          .sql("drop table if exists dfs.tmp.t2")
+          .run();
+    }
+  }
+
 }
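
The new test above creates dfs.tmp.t1 and dfs.tmp.t2 and then joins them while
referring to the schema in two different ways in the same query: t1 through the
dotted path dfs.tmp.`t1` and t2 through the quoted schema name `dfs.tmp`. For
readers who want to reproduce the scenario outside the test harness, the sketch
below issues an equivalent query over plain JDBC. It is an illustration only,
not part of the commit: it assumes the Apache Drill JDBC driver is on the
classpath, a Drillbit is reachable at localhost:31010, and the two dfs.tmp
tables already exist; the class name MixedSchemaPathQuery is hypothetical.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class MixedSchemaPathQuery {
      public static void main(String[] args) throws Exception {
        // Assumed Drillbit address; adjust host and port for your environment.
        String url = "jdbc:drill:drillbit=localhost:31010";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             // Same shape as the new test: t1 addressed via the dotted path
             // dfs.tmp.`t1`, t2 via the quoted schema `dfs.tmp`.
             ResultSet rs = stmt.executeQuery(
                 "select a.c1 from dfs.tmp.`t1` a "
                     + "join `dfs.tmp`.`t2` b on b.c1 = a.c1")) {
          while (rs.next()) {
            System.out.println(rs.getInt("c1"));
          }
        }
      }
    }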
