DRILL-623: Unable to run query containing quoted schema name
Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/d414d5dc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/d414d5dc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/d414d5dc

Branch: refs/heads/master
Commit: d414d5dccbda7b901260d66448367678bdced235
Parents: c5f5e28
Author: vkorukanti <venki.koruka...@gmail.com>
Authored: Mon May 12 17:33:41 2014 -0700
Committer: Aditya Kishore <adi...@maprtech.com>
Committed: Mon May 12 21:33:34 2014 -0700

----------------------------------------------------------------------
 .../apache/drill/exec/store/AbstractSchema.java |  4 +
 .../drill/exec/store/StoragePluginRegistry.java | 48 ++++++++++-
 .../drill/exec/store/SubSchemaWrapper.java      | 87 ++++++++++++++++++++
 .../exec/store/dfs/FileSystemSchemaFactory.java |  5 ++
 .../store/hive/schema/HiveSchemaFactory.java    |  4 +
 .../drill/exec/store/ischema/OptiqProvider.java | 21 +++--
 .../apache/drill/jdbc/test/TestJdbcQuery.java   | 53 +++++++++---
 7 files changed, 204 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d414d5dc/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
index 67b4ad9..485136b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
@@ -80,6 +80,10 @@ public abstract class AbstractSchema implements Schema{
     throw new UnsupportedOperationException("New tables are not allowed in this schema");
   }
 
+  public boolean showInInformationSchema() {
+    return true;
+  }
+
   @Override
   public Collection<Function> getFunctions(String name) {
     return Collections.emptyList();

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d414d5dc/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
index 1459cfa..b8d56ce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
@@ -24,10 +24,12 @@ import java.net.URL;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import com.google.common.collect.Lists;
 import net.hydromatic.linq4j.expressions.DefaultExpression;
 import net.hydromatic.linq4j.expressions.Expression;
 import net.hydromatic.optiq.SchemaPlus;
@@ -43,7 +45,6 @@ import org.apache.drill.exec.cache.DistributedMap;
 import org.apache.drill.exec.exception.DrillbitStartupException;
 import org.apache.drill.exec.planner.logical.DrillRuleSets;
 import org.apache.drill.exec.planner.logical.StoragePlugins;
-import org.apache.drill.exec.rpc.user.DrillUser;
 import org.apache.drill.exec.rpc.user.UserSession;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.dfs.FileSystemPlugin;
@@ -229,6 +230,51 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
     for(Map.Entry<String, StoragePlugin> e : plugins.entrySet()){
       e.getValue().registerSchemas(session, parent);
     }
+
+    // Add second level schema as top level schema with name qualified with parent schema name
+    // Ex: "dfs" schema has "default" and "tmp" as sub schemas. Add following extra schemas "dfs.default" and
+    // "dfs.tmp" under root schema.
+    //
+    // Before change, schema tree looks like below:
+    // "root"
+    //    -- "dfs"
+    //          -- "default"
+    //          -- "tmp"
+    //    -- "hive"
+    //          -- "default"
+    //          -- "hivedb1"
+    //
+    // After the change, the schema tree looks like below:
+    // "root"
+    //    -- "dfs"
+    //          -- "default"
+    //          -- "tmp"
+    //    -- "dfs.default"
+    //    -- "dfs.tmp"
+    //    -- "hive"
+    //          -- "default"
+    //          -- "hivedb1"
+    //    -- "hive.default"
+    //    -- "hive.hivedb1"
+    List<SchemaPlus> secondLevelSchemas = Lists.newArrayList();
+    for(String firstLevelSchemaName : parent.getSubSchemaNames()) {
+      SchemaPlus firstLevelSchema = parent.getSubSchema(firstLevelSchemaName);
+      for(String secondLevelSchemaName : firstLevelSchema.getSubSchemaNames()) {
+        secondLevelSchemas.add(firstLevelSchema.getSubSchema(secondLevelSchemaName));
+      }
+    }
+
+    for(SchemaPlus schema : secondLevelSchemas) {
+      AbstractSchema drillSchema;
+      try {
+        drillSchema = schema.unwrap(AbstractSchema.class);
+      } catch(ClassCastException e) {
+        throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
+      }
+
+      SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
+      parent.add(wrapper.getName(), wrapper);
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d414d5dc/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java
new file mode 100644
index 0000000..576d761
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import com.google.common.collect.ImmutableList;
+import net.hydromatic.optiq.Function;
+import net.hydromatic.optiq.Schema;
+import net.hydromatic.optiq.Table;
+import org.apache.drill.exec.planner.logical.CreateTableEntry;
+
+import java.util.Collection;
+import java.util.Set;
+
+public class SubSchemaWrapper extends AbstractSchema {
+
+  private final AbstractSchema innerSchema;
+
+  public SubSchemaWrapper(AbstractSchema innerSchema) {
+    super(ImmutableList.<String>of(), innerSchema.getFullSchemaName());
+    this.innerSchema = innerSchema;
+  }
+
+  @Override
+  public boolean showInInformationSchema() {
+    return false;
+  }
+
+  @Override
+  public AbstractSchema getDefaultSchema() {
+    return innerSchema.getDefaultSchema();
+  }
+
+  @Override
+  public CreateTableEntry createNewTable(String tableName) {
+    return innerSchema.createNewTable(tableName);
+  }
+
+  @Override
+  public Collection<Function> getFunctions(String name) {
+    return innerSchema.getFunctions(name);
+  }
+
+  @Override
+  public Set<String> getFunctionNames() {
+    return innerSchema.getFunctionNames();
+  }
+
+  @Override
+  public Schema getSubSchema(String name) {
+    return innerSchema.getSubSchema(name);
+  }
+
+  @Override
+  public Set<String> getSubSchemaNames() {
+    return innerSchema.getSubSchemaNames();
+  }
+
+  @Override
+  public boolean isMutable() {
+    return innerSchema.isMutable();
+  }
+
+  @Override
+  public Table getTable(String name) {
+    return innerSchema.getTable(name);
+  }
+
+  @Override
+  public Set<String> getTableNames() {
+    return innerSchema.getTableNames();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d414d5dc/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java
index 28496eb..9a57aa2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemSchemaFactory.java
@@ -86,6 +86,11 @@ public class FileSystemSchemaFactory implements SchemaFactory{
     }
 
     @Override
+    public boolean showInInformationSchema() {
+      return false;
+    }
+
+    @Override
     public Table getTable(String name) {
       return defaultSchema.getTable(name);
     }

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d414d5dc/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java
index cff6bb1..6dd6143 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/schema/HiveSchemaFactory.java
@@ -212,6 +212,10 @@ public class HiveSchemaFactory implements SchemaFactory {
       }
     }
 
+    @Override
+    public boolean showInInformationSchema() {
+      return false;
+    }
 
     @Override
     public Set<String> getSubSchemaNames() {

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d414d5dc/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/OptiqProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/OptiqProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/OptiqProvider.java
index cb4f3d9..e12e5d6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/OptiqProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/OptiqProvider.java
@@ -22,6 +22,7 @@ import net.hydromatic.optiq.SchemaPlus;
 import net.hydromatic.optiq.Table;
 import net.hydromatic.optiq.jdbc.JavaTypeFactoryImpl;
 
+import org.apache.drill.exec.store.AbstractSchema;
 import org.eigenbase.reltype.RelDataType;
 import org.eigenbase.reltype.RelDataTypeField;
 import org.eigenbase.sql.type.SqlTypeName;
@@ -58,8 +59,8 @@ public class OptiqProvider {
    */
  static public class Schemata extends Abstract {
     @Override
-    public boolean visitSchema(String schemaName, Schema schema) {
-      if (schemaName != null && schemaName != "") {
+    public boolean visitSchema(String schemaName, SchemaPlus schema) {
+      if (shouldVisitSchema(schema) && schemaName != null && schemaName != "") {
         writeRow("DRILL", schemaName, "<owner>");
       }
       return false;
@@ -173,12 +174,22 @@ public class OptiqProvider {
      * If the schema visitor returns true, then visit the tables.
      * If the table visitor returns true, then visit the fields (columns).
      */
-    public boolean visitSchema(String schemaName, Schema schema){return true;}
+    public boolean visitSchema(String schemaName, SchemaPlus schema) {
+      return shouldVisitSchema(schema);
+    }
     public boolean visitTableName(String schemaName, String tableName){return true;}
     public boolean visitTable(String schemaName, String tableName, Table table){return true;}
     public boolean visitField(String schemaName, String tableName, RelDataTypeField field){return true;}
-
+    protected boolean shouldVisitSchema(SchemaPlus schema) {
+      try {
+        AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class);
+        return drillSchema.showInInformationSchema();
+      } catch(ClassCastException e) {
+        // ignore and return true as this is not a drill schema
+      }
+      return true;
+    }
 
     /**
      * Start scanning an Optiq Schema.
@@ -194,7 +205,7 @@
      * @param schema - the current schema.
      * @param visitor - the methods to invoke at each entity in the schema.
      */
-    private void scanSchema(String schemaPath, Schema schema) {
+    private void scanSchema(String schemaPath, SchemaPlus schema) {
 
       // If we have an empty schema path, then don't insert a leading dot.
       String separator;

http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/d414d5dc/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
index d087f7d..2d26b17 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
@@ -285,8 +285,6 @@ public class TestJdbcQuery extends JdbcTest{
         "TABLE_SCHEMA=hive.default; TABLE_NAME=kv\n" +
         "TABLE_SCHEMA=hive.default; TABLE_NAME=foodate\n" +
         "TABLE_SCHEMA=hive.db1; TABLE_NAME=kv_db1\n" +
-        "TABLE_SCHEMA=hive; TABLE_NAME=kv\n" +
-        "TABLE_SCHEMA=hive; TABLE_NAME=foodate\n" +
         "TABLE_SCHEMA=sys; TABLE_NAME=drillbits\n" +
         "TABLE_SCHEMA=sys; TABLE_NAME=options\n" +
         "TABLE_SCHEMA=INFORMATION_SCHEMA; TABLE_NAME=VIEWS\n" +
@@ -310,10 +308,10 @@ public class TestJdbcQuery extends JdbcTest{
         );
 
     JdbcAssert.withNoDefaultSchema()
-        .sql("SHOW TABLES IN hive")
+        .sql("SHOW TABLES IN hive.`default`")
         .returns(
-            "TABLE_SCHEMA=hive; TABLE_NAME=kv\n" +
-            "TABLE_SCHEMA=hive; TABLE_NAME=foodate\n");
+            "TABLE_SCHEMA=hive.default; TABLE_NAME=kv\n" +
+            "TABLE_SCHEMA=hive.default; TABLE_NAME=foodate\n");
   }
 
   @Test
@@ -335,13 +333,10 @@ public class TestJdbcQuery extends JdbcTest{
     String expected = "SCHEMA_NAME=hive.default\n" +
         "SCHEMA_NAME=hive.db1\n" +
-        "SCHEMA_NAME=hive\n" +
         "SCHEMA_NAME=dfs.home\n" +
         "SCHEMA_NAME=dfs.default\n" +
         "SCHEMA_NAME=dfs.tmp\n" +
-        "SCHEMA_NAME=dfs\n" +
         "SCHEMA_NAME=cp.default\n" +
-        "SCHEMA_NAME=cp\n" +
         "SCHEMA_NAME=sys\n" +
         "SCHEMA_NAME=INFORMATION_SCHEMA\n";
@@ -352,8 +347,8 @@ public class TestJdbcQuery extends JdbcTest{
   @Test
   public void testShowDatabasesWhere() throws Exception{
     JdbcAssert.withNoDefaultSchema()
-        .sql("SHOW DATABASES WHERE SCHEMA_NAME='dfs'")
-        .returns("SCHEMA_NAME=dfs\n");
+        .sql("SHOW DATABASES WHERE SCHEMA_NAME='dfs.tmp'")
+        .returns("SCHEMA_NAME=dfs.tmp\n");
   }
 
   @Test
@@ -362,8 +357,7 @@ public class TestJdbcQuery extends JdbcTest{
         .sql("SHOW DATABASES LIKE '%i%'")
         .returns(
             "SCHEMA_NAME=hive.default\n"+
-            "SCHEMA_NAME=hive.db1\n"+
-            "SCHEMA_NAME=hive\n"
+            "SCHEMA_NAME=hive.db1"
         );
   }
@@ -859,4 +853,39 @@ public class TestJdbcQuery extends JdbcTest{
       }
     });
   }
+
+  // Tests using backticks around the complete schema path
+  // select * from `dfs.tmp`.`/tmp/nation.parquet`;
+  @Test
+  public void testCompleteSchemaRef1() throws Exception {
+    testQuery("select * from `cp.default`.`employee.json` limit 2");
+  }
+
+  @Test
+  public void testCompleteSchemaRef2() throws Exception {
+    JdbcAssert.withNoDefaultSchema().withConnection(new Function<Connection, Void>() {
+      public Void apply(Connection connection) {
+        try {
+          Statement statement = connection.createStatement();
+
+          // change default schema
+          ResultSet resultSet = statement.executeQuery("USE `dfs.default`");
+          String result = JdbcAssert.toString(resultSet).trim();
+          String expected = "ok=true; summary=Default schema changed to 'dfs.default'";
+          assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected), expected.equals(result));
+
+          resultSet = statement.executeQuery(
+              String.format("select R_REGIONKEY from `%s/../../sample-data/region.parquet` LIMIT 1", WORKING_PATH));
+          result = JdbcAssert.toString(resultSet).trim();
+          expected = "R_REGIONKEY=0";
+          assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
+              expected.equals(result));
+
+          return null;
+        } catch (Exception e) {
+          throw new RuntimeException(e);
+        }
+      }
+    });
+  }
 }
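
For readers who want to try the change, here is a minimal usage sketch (not part of the commit). It assumes a local Drillbit reachable through the JDBC URL "jdbc:drill:zk=local" and the classpath plugin's sample employee.json, and it issues the same quoted, fully qualified schema references exercised by the new testCompleteSchemaRef tests.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Sketch only: exercises the backtick-quoted, fully qualified schema names
// ("dfs.default", "cp.default") that this change registers under the root
// schema. The JDBC URL and sample data set are assumptions, not part of the
// commit above.
public class QuotedSchemaExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:drill:zk=local");
         Statement stmt = conn.createStatement()) {

      // Switch the session's default schema using the qualified name.
      stmt.execute("USE `dfs.default`");

      // Reference a quoted two-part schema name directly in the FROM clause.
      try (ResultSet rs = stmt.executeQuery(
          "select * from `cp.default`.`employee.json` limit 2")) {
        while (rs.next()) {
          System.out.println(rs.getObject(1));
        }
      }
    }
  }
}

Both statements rely on the qualified top-level schemas ("dfs.default", "cp.default") that StoragePluginRegistry now adds under the root schema, which is what lets the quoted names above resolve.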