[
https://issues.apache.org/jira/browse/DRILL-4514?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15372611#comment-15372611
]
ASF GitHub Bot commented on DRILL-4514:
---------------------------------------
Github user arina-ielchiieva commented on a diff in the pull request:
https://github.com/apache/drill/pull/436#discussion_r70406745
--- Diff:
exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java
---
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.sql.handlers;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.SerializableString;
+import com.fasterxml.jackson.core.io.CharacterEscapes;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Joiner;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.sql.SqlIdentifier;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.physical.PhysicalPlan;
+import org.apache.drill.exec.planner.sql.DirectPlan;
+import org.apache.drill.exec.planner.sql.SchemaUtilites;
+import org.apache.drill.exec.planner.sql.parser.SqlDescribeSchema;
+import org.apache.drill.exec.store.StoragePlugin;
+import org.apache.drill.exec.store.dfs.FileSystemSchemaFactory;
+import org.apache.drill.exec.store.dfs.WorkspaceConfig;
+
+import java.util.List;
+import java.util.Map;
+
+import static
com.fasterxml.jackson.databind.SerializationFeature.INDENT_OUTPUT;
+
+public class DescribeSchemaHandler extends DefaultSqlHandler {
+
+ /** Creates a handler for DESCRIBE SCHEMA statements; all setup is delegated to {@link DefaultSqlHandler}. */
+ public DescribeSchemaHandler(SqlHandlerConfig config) {
+ super(config);
+ }
+
+ // Standard per-class SLF4J logger used by the UserException builders below.
+ private static final org.slf4j.Logger logger =
org.slf4j.LoggerFactory.getLogger(DescribeSchemaHandler.class);
+ // Shared, thread-safe JSON writer that renders the storage plugin config
+ // as the "properties" column of DESCRIBE SCHEMA output. It is customized
+ // so backslashes are NOT escaped (keeps Windows paths readable) and the
+ // output is pretty-printed (INDENT_OUTPUT).
+ // NOTE(review): a throwaway ObjectMapper is constructed solely to obtain
+ // a JsonFactory for the real mapper — a plain `new JsonFactory()` would
+ // likely do; confirm before changing.
+ private static final ObjectMapper mapper = new ObjectMapper(new
ObjectMapper().getFactory().setCharacterEscapes(new CharacterEscapes() {
+ @Override
+ public int[] getEscapeCodesForAscii() {
+ // add standard set of escaping characters
+ int[] esc = CharacterEscapes.standardAsciiEscapesForJSON();
+ // don't escape backslash (not to corrupt windows path)
+ esc['\\'] = CharacterEscapes.ESCAPE_NONE;
+ return esc;
+ }
+
+ @Override
+ public SerializableString getEscapeSequence(int i) {
+ // no further escaping (beyond ASCII chars) needed
+ return null;
+ }
+ })).enable(INDENT_OUTPUT);
+
+
+ @Override
+ public PhysicalPlan getPlan(SqlNode sqlNode) {
+ SqlIdentifier schema = ((SqlDescribeSchema) sqlNode).getSchema();
+ SchemaPlus drillSchema =
SchemaUtilites.findSchema(config.getConverter().getDefaultSchema(),
schema.names);
+
+ if (drillSchema != null) {
+ StoragePlugin storagePlugin;
+ try {
+ storagePlugin =
context.getStorage().getPlugin(schema.names.get(0));
+ } catch (ExecutionSetupException e) {
+ throw UserException.validationError()
+ .message("Failure while retrieving storage plugin", e)
+ .build(logger);
+ }
+ String properties;
+ try {
+ properties = getPropertiesAsJsonString(schema.names,
storagePlugin.getConfig());
+ } catch (JsonProcessingException e) {
+ throw UserException.parseError()
+ .message("Error while trying to convert storage config to json
string")
+ .build(logger);
+ }
+ return DirectPlan.createDirectPlan(context, new
DescribeSchemaResult(Joiner.on(".").join(schema.names), properties));
+ }
+
+ throw UserException.validationError()
+ .message(String.format("Invalid schema name [%s]",
Joiner.on(".").join(schema.names)))
+ .build(logger);
+ }
+
+ /**
+ * Converts schema config properties to json string. If storage plugin
has several workspaces, picks appropriate one.
+ */
+ private String getPropertiesAsJsonString(List<String> names,
StoragePluginConfig config) throws JsonProcessingException {
+ final Map configMap = mapper.convertValue(config, Map.class);
+ Object workspaces = configMap.remove("workspaces");
+ if (workspaces != null) {
+ Map map = (Map) workspaces;
+ String key = names.size() > 1 ? names.get(1) :
FileSystemSchemaFactory.DEFAULT_WS_NAME;
--- End diff --
Yes, I assume we have the full path to the schema name.
I will create a Jira ticket to add support for:
use dfs;
DESCRIBE schema tmp; // which will get the properties of dfs.tmp;
as enhancement to describe schema functionality.
> Add describe schema <schema_name> command
> -----------------------------------------
>
> Key: DRILL-4514
> URL: https://issues.apache.org/jira/browse/DRILL-4514
> Project: Apache Drill
> Issue Type: New Feature
> Affects Versions: Future
> Reporter: Arina Ielchiieva
> Assignee: Arina Ielchiieva
>
> Add describe database <db_name> command which will return directory
> associated with a database on the fly.
> Syntax:
> describe database <db_name>
> describe schema <schema_name>
> Output:
> {code:sql}
> DESCRIBE SCHEMA dfs.tmp;
> {code}
> {noformat}
> +--------+------------+
> | schema | properties |
> +--------+------------+
> | dfs.tmp | {
> "type" : "file",
> "enabled" : true,
> "connection" : "file:///",
> "config" : null,
> "formats" : {
> "psv" : {
> "type" : "text",
> "extensions" : [ "tbl" ],
> "delimiter" : "|"
> },
> "csv" : {
> "type" : "text",
> "extensions" : [ "csv" ],
> "delimiter" : ","
> },
> "tsv" : {
> "type" : "text",
> "extensions" : [ "tsv" ],
> "delimiter" : "\t"
> },
> "parquet" : {
> "type" : "parquet"
> },
> "json" : {
> "type" : "json",
> "extensions" : [ "json" ]
> },
> "avro" : {
> "type" : "avro"
> },
> "sequencefile" : {
> "type" : "sequencefile",
> "extensions" : [ "seq" ]
> },
> "csvh" : {
> "type" : "text",
> "extensions" : [ "csvh" ],
> "extractHeader" : true,
> "delimiter" : ","
> }
> },
> "location" : "/tmp",
> "writable" : true,
> "defaultInputFormat" : null
> } |
> +--------+------------+
> {noformat}
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)