zhangbutao commented on code in PR #3288: URL: https://github.com/apache/hive/pull/3288#discussion_r1959231842
########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java: ########## @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter.location; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogOperation; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +import java.net.URI; +import java.net.URISyntaxException; + +/** + * Operation process of altering a catalog's location. 
+ */ +public class AlterCatalogSetLocationOperation extends AbstractAlterCatalogOperation<AlterCatalogSetLocationDesc> { + public AlterCatalogSetLocationOperation(DDLOperationContext context, AlterCatalogSetLocationDesc desc) { + super(context, desc); + } + + @Override + protected void doAlteration(Catalog catalog) throws HiveException { + try { + String newLocation = Utilities.getQualifiedPath(context.getConf(), new Path(desc.getLocation())); + + URI locationURI = new URI(newLocation); + if (!locationURI.isAbsolute() || StringUtils.isBlank(locationURI.getScheme())) { Review Comment: `locationURI.isAbsolute()` already implicitly checks the existence of scheme, so we could simplify this code: ```suggestion if (!locationURI.isAbsolute()) { ``` ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogAnalyzer.java: ########## @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter; + +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for database alteration commands. Review Comment: ```suggestion * Analyzer for catalog alteration commands. ``` ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java: ########## @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter.location; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogOperation; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +import java.net.URI; +import java.net.URISyntaxException; + +/** + * Operation process of altering a catalog's location. + */ +public class AlterCatalogSetLocationOperation extends AbstractAlterCatalogOperation<AlterCatalogSetLocationDesc> { + public AlterCatalogSetLocationOperation(DDLOperationContext context, AlterCatalogSetLocationDesc desc) { + super(context, desc); + } + + @Override + protected void doAlteration(Catalog catalog) throws HiveException { + try { + String newLocation = Utilities.getQualifiedPath(context.getConf(), new Path(desc.getLocation())); + + URI locationURI = new URI(newLocation); + if (!locationURI.isAbsolute() || StringUtils.isBlank(locationURI.getScheme())) { + throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation); + } + + if (newLocation.equals(catalog.getLocationUri())) { + LOG.info("AlterCatalog skipped. No change in location."); + } else { + catalog.setLocationUri(newLocation); Review Comment: Please add a LOG in here: `LOG.info("Catalog location changed from {} to {}", catalog.getLocationUri(), newLocation);` ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java: ########## @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.alter.location; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; +import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogOperation; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +import java.net.URI; +import java.net.URISyntaxException; + +/** + * Operation process of altering a catalog's location. + */ +public class AlterCatalogSetLocationOperation extends AbstractAlterCatalogOperation<AlterCatalogSetLocationDesc> { + public AlterCatalogSetLocationOperation(DDLOperationContext context, AlterCatalogSetLocationDesc desc) { + super(context, desc); + } + + @Override + protected void doAlteration(Catalog catalog) throws HiveException { + try { + String newLocation = Utilities.getQualifiedPath(context.getConf(), new Path(desc.getLocation())); + + URI locationURI = new URI(newLocation); + if (!locationURI.isAbsolute() || StringUtils.isBlank(locationURI.getScheme())) { + throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation); + } + + if (newLocation.equals(catalog.getLocationUri())) { + LOG.info("AlterCatalog skipped. 
No change in location."); + } else { + catalog.setLocationUri(newLocation); + } + return; Review Comment: Remove the unnecessary `return`. ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java: ########## @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.create; + +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for catalog creation commands. 
+ */ +@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_CREATECATALOG) +public class CreateCatalogAnalyzer extends BaseSemanticAnalyzer { + public CreateCatalogAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String catalogName = unescapeIdentifier(root.getChild(0).getText()); + String locationUrl = unescapeSQLString(root.getChild(1).getChild(0).getText()); + outputs.add(toWriteEntity(locationUrl)); + + boolean ifNotExists = false; + String comment = null; + + for (int i = 2; i < root.getChildCount(); i++) { + ASTNode childNode = (ASTNode) root.getChild(i); + switch (childNode.getToken().getType()) { + case HiveParser.TOK_IFNOTEXISTS: + ifNotExists = true; + break; + case HiveParser.TOK_CATALOGCOMMENT: Review Comment: The bad code indentation ########## ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java: ########## @@ -1868,6 +1869,23 @@ public static Path tryQualifyPath(Path path, HiveConf conf) { } } + protected Catalog getCatalog(String catName) throws SemanticException { + return getCatalog(catName, true); + } + + protected Catalog getCatalog(String catName, boolean throwException) throws SemanticException { + Catalog catalog = null; + try { + catalog = db.getCatalog(catName); + } catch (Exception e) { + throw new SemanticException(e.getMessage(), e); + } + if (catalog == null && throwException) { + throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS.getMsg(catName)); + } + return catalog; + } Review Comment: ```suggestion try { Catalog catalog = db.getCatalog(catName); if (catalog == null && throwException) { throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS.getMsg(catName)); } return catalog; } catch (MetaException | TException e) { throw new SemanticException("Failed to retrieve catalog " + catName + ": " + e.getMessage(), e); } } ``` ########## 
ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java: ########## @@ -2474,6 +2514,32 @@ public void validateDatabaseExists(String databaseName) throws SemanticException } } + public Catalog getCatalog(String catName) throws HiveException { + PerfLogger perfLogger = SessionState.getPerfLogger(); + perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG); + try { + return getMSC().getCatalog(catName); + } catch (NoSuchObjectException e) { + return null; + } catch (Exception e) { + throw new HiveException(e); + } finally { + perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG, "HS2-cache"); + } + } + + public void alterCatalog(String catName, Catalog catalog) throws HiveException { + try { + getMSC().alterCatalog(catName, catalog); + } catch (MetaException e) { + throw new HiveException("Unable to alter catalog " + catName + ". " + e.getMessage(), e); + } catch (NoSuchObjectException e) { + throw new HiveException("Catalog " + catName + " does not exists.", e); + } catch (TException e) { + throw new HiveException("Unable to alter catalog " + catName + ". " + e.getMessage(), e); + } Review Comment: ```suggestion } catch (NoSuchObjectException e) { throw new HiveException("Catalog " + catName + " does not exist.", e); } catch (MetaException | TException e) { throw new HiveException("Unable to alter catalog " + catName + ". 
" + e.getMessage(), e); } ``` ########## ql/src/test/queries/clientpositive/catalog.q: ########## @@ -0,0 +1,47 @@ +set hive.mapred.mode=nonstrict; +set hive.support.concurrency = true; + +-- SORT_QUERY_RESULTS +SHOW CATALOGS; + +-- CREATE with comment +CREATE CATALOG test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog'; + +-- CREATE INE already exists +CREATE CATALOG IF NOT EXISTS test_cat LOCATION '/tmp/test_cat'; +SHOW CATALOGS; + +-- DROP +DROP CATALOG test_cat; +SHOW CATALOGS; + +-- CREATE INE doesn't exist +CREATE CATALOG IF NOT EXISTS test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog'; +SHOW CATALOGS; + +-- DROP IE exists +DROP CATALOG IF EXISTS test_cat; +SHOW CATALOGS; + +-- DROP IE doesn't exist +DROP CATALOG IF EXISTS test_cat; + +-- SHOW +CREATE CATALOG test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog'; +SHOW CATALOGS; + +-- SHOW pattern +SHOW CATALOGS LIKE 'test%'; + +-- SHOW pattern +SHOW CATALOGS LIKE 'test_'; + +-- SHOW pattern +SHOW CATALOGS LIKE 'test__'; + +-- DESCRIBE +DESC CATALOG test_cat; Review Comment: Can we add a `desc formatted & EXTENDED` test? ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java: ########## @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.create; + +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +/** + * Analyzer for catalog creation commands. + */ +@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_CREATECATALOG) +public class CreateCatalogAnalyzer extends BaseSemanticAnalyzer { + public CreateCatalogAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + public void analyzeInternal(ASTNode root) throws SemanticException { + String catalogName = unescapeIdentifier(root.getChild(0).getText()); + String locationUrl = unescapeSQLString(root.getChild(1).getChild(0).getText()); + outputs.add(toWriteEntity(locationUrl)); + + boolean ifNotExists = false; + String comment = null; + + for (int i = 2; i < root.getChildCount(); i++) { + ASTNode childNode = (ASTNode) root.getChild(i); + switch (childNode.getToken().getType()) { + case HiveParser.TOK_IFNOTEXISTS: + ifNotExists = true; + break; + case HiveParser.TOK_CATALOGCOMMENT: + comment = unescapeSQLString(childNode.getChild(0).getText()); + break; + default: Review Comment: The bad code indentation ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogFormatter.java: ########## @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.desc; + +import org.apache.hadoop.hive.common.type.CalendarUtils; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ddl.ShowUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.formatting.MapBuilder; +import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils; +import org.apache.hive.common.util.HiveStringUtils; + +import java.io.DataOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +/** + * Formats DESC CATALOG results. 
+ */ +abstract class DescCatalogFormatter { + static DescCatalogFormatter getFormatter(HiveConf hiveConf) { + if (MetaDataFormatUtils.isJson(hiveConf)) { + return new JsonDescCatalogFormatter(); + } + return new TextDescCatalogFormatter(); + } + + abstract void showCatalogDescription(DataOutputStream out, String catalog, String comment, String location, + int createTime) throws HiveException; + + // ------ Implementations ------ + static class JsonDescCatalogFormatter extends DescCatalogFormatter { + @Override + void showCatalogDescription(DataOutputStream out, String catalog, String comment, String location, + int createTime) throws HiveException { + MapBuilder builder = MapBuilder.create() + .put("catalog", catalog) + .put("comment", comment) + .put("location", location); + if (createTime != 0) { + builder.put("createTime", CalendarUtils.formatTimestamp((long) createTime * 1000, true)); + } + ShowUtils.asJson(out, builder.build()); + } + } + + static class TextDescCatalogFormatter extends DescCatalogFormatter { + @Override + void showCatalogDescription(DataOutputStream out, String catalog, String comment, String location, + int createTime) throws HiveException { + try { + out.write("Catalog Name".getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.tabCode); + out.write(catalog.getBytes(StandardCharsets.UTF_8)); + if (comment != null) { + out.write(Utilities.newLineCode); + out.write("Comment".getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.tabCode); + out.write(HiveStringUtils.escapeJava(comment).getBytes(StandardCharsets.UTF_8)); + } + if (location != null) { + out.write(Utilities.newLineCode); + out.write("Location".getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.tabCode); + out.write(location.getBytes(StandardCharsets.UTF_8)); + } + if (createTime != 0) { + out.write(Utilities.newLineCode); + out.write("Create Time".getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.tabCode); + String str = CalendarUtils.formatTimestamp((long) 
createTime * 1000, true); + out.write(str.getBytes(StandardCharsets.UTF_8)); + } + out.write(Utilities.newLineCode); + } catch (IOException e) { + throw new HiveException(e); + } + } Review Comment: GPT's suggestion, I think it is good: ```suggestion void showCatalogDescription(DataOutputStream out, String catalog, String comment, String location, int createTime) throws HiveException { try { writeLine(out, "Catalog Name", catalog); if (comment != null) { writeLine(out, "Comment", HiveStringUtils.escapeJava(comment)); } if (location != null) { writeLine(out, "Location", location); } if (createTime != 0) { String createTimeStr = CalendarUtils.formatTimestamp((long) createTime * 1000, true); writeLine(out, "Create Time", createTimeStr); } out.write(Utilities.newLineCode); } catch (IOException e) { throw new HiveException("Error writing catalog description", e); } } private void writeLine(DataOutputStream out, String label, String value) throws IOException { out.write(label.getBytes(StandardCharsets.UTF_8)); out.write(Utilities.tabCode); out.write(value.getBytes(StandardCharsets.UTF_8)); out.write(Utilities.newLineCode); } ``` ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogDesc.java: ########## @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.ddl.catalog.desc; + +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.ql.ddl.DDLDesc; +import org.apache.hadoop.hive.ql.plan.Explain; + +import java.io.Serializable; + +/** + * DDL task description for DESC CATALOG commands. + */ +@Explain(displayName = "Describe Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED }) +public class DescCatalogDesc implements DDLDesc, Serializable { + private static final long serialVersionUID = 1L; + + public static final String DESC_CATALOG_SCHEMA = "cat_name,comment,location#string:string:string"; + + public static final String DESC_CATALOG_SCHEMA_EXTENDED = "cat_name,comment,location,create_time#string:string:string:string"; Review Comment: What's the usage of this code? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org For additional commands, e-mail: gitbox-h...@hive.apache.org