This is an automated email from the ASF dual-hosted git repository.

ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit 87c74ec2d5f86e911d7b832f4d58b5e5e8f4c161
Author: Naveen Gangam <ngan...@cloudera.com>
AuthorDate: Tue Nov 10 12:27:03 2020 -0500

    Adding DDL support for connectors (create/drop/show/desc/alter)
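
    For reference, a sketch of the statements this change is intended to
    accept, inferred from the grammar and analyzer changes below (the exact
    keyword spellings such as CONNECTOR, the 'mysql' type value and the
    DCPROPERTIES keys are illustrative assumptions, not taken from this patch):

        -- create a connector pointing at an external datasource (hypothetical values)
        CREATE CONNECTOR IF NOT EXISTS mysql_ds
          TYPE 'mysql'
          URL 'jdbc:mysql://example-host:3306'
          COMMENT 'external mysql datasource'
          WITH DCPROPERTIES ('connector.user'='hive', 'connector.password'='secret');

        SHOW CONNECTORS;
        DESC CONNECTOR mysql_ds;

        ALTER CONNECTOR mysql_ds SET DCPROPERTIES ('connector.fetchsize'='1000');
        ALTER CONNECTOR mysql_ds SET URL 'jdbc:mysql://other-host:3306';
        ALTER CONNECTOR mysql_ds SET OWNER USER hive_admin;

        DROP CONNECTOR IF EXISTS mysql_ds;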
---
 .../hadoop/hive/ql/parse/AlterClauseParser.g       |  4 +-
 .../apache/hadoop/hive/ql/parse/CreateDDLParser.g  |  9 ++-
 .../hadoop/hive/ql/parse/IdentifiersParser.g       |  2 +-
 .../ql/ddl/database/create/CreateDatabaseDesc.java |  9 ---
 .../alter/AbstractAlterDataConnectorAnalyzer.java  | 42 +++++++++++
 .../alter/AbstractAlterDataConnectorDesc.java      | 54 +++++++++++++
 .../alter/AbstractAlterDataConnectorOperation.java | 59 +++++++++++++++
 .../owner/AlterDataConnectorSetOwnerAnalyzer.java  | 54 +++++++++++++
 .../owner/AlterDataConnectorSetOwnerDesc.java      | 45 +++++++++++
 .../owner/AlterDataConnectorSetOwnerOperation.java | 41 ++++++++++
 .../AlterDataConnectorSetPropertiesAnalyzer.java   | 58 ++++++++++++++
 .../AlterDataConnectorSetPropertiesDesc.java       | 47 ++++++++++++
 .../AlterDataConnectorSetPropertiesOperation.java  | 49 ++++++++++++
 .../url/AlterDataConnectorSetUrlAnalyzer.java      | 47 ++++++++++++
 .../alter/url/AlterDataConnectorSetUrlDesc.java    | 43 +++++++++++
 .../url/AlterDataConnectorSetUrlOperation.java     | 65 ++++++++++++++++
 .../create/CreateDataConnectorAnalyzer.java        | 88 ++++++++++++++++++++++
 .../create/CreateDataConnectorDesc.java            | 80 ++++++++++++++++++++
 .../create/CreateDataConnectorOperation.java       | 71 +++++++++++++++++
 .../desc/DescDataConnectorAnalyzer.java            | 61 +++++++++++++++
 .../dataconnector/desc/DescDataConnectorDesc.java  | 71 +++++++++++++++++
 .../desc/DescDataConnectorOperation.java           | 62 +++++++++++++++
 .../drop/DropDataConnectorAnalyzer.java            | 59 +++++++++++++++
 .../dataconnector/drop/DropDataConnectorDesc.java  | 62 +++++++++++++++
 .../drop/DropDataConnectorOperation.java           | 65 ++++++++++++++++
 .../show/ShowDataConnectorsAnalyzer.java           | 57 ++++++++++++++
 .../dataconnector/show/ShowDataConnectorsDesc.java | 54 +++++++++++++
 .../show/ShowDataConnectorsOperation.java          | 66 ++++++++++++++++
 28 files changed, 1411 insertions(+), 13 deletions(-)

diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
index c5074be..4625618 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
@@ -463,8 +463,8 @@ alterDataConnectorStatementSuffix
 alterDataConnectorSuffixProperties
 @init { gParent.pushMsg("alter connector set properties statement", state); }
 @after { gParent.popMsg(state); }
-    : name=identifier KW_SET KW_DBPROPERTIES dbProperties
-    -> ^(TOK_ALTERDATACONNECTOR_PROPERTIES $name dbProperties)
+    : name=identifier KW_SET KW_DCPROPERTIES dcProperties
+    -> ^(TOK_ALTERDATACONNECTOR_PROPERTIES $name dcProperties)
     ;
 
 alterDataConnectorSuffixSetOwner
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/CreateDDLParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/CreateDDLParser.g
index 434ae57..69da7c7 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/CreateDDLParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/CreateDDLParser.g
@@ -112,7 +112,7 @@ createTableStatement
 createDataConnectorStatement
 @init { gParent.pushMsg("create connector statement", state); }
 @after { gParent.popMsg(state); }
-    : KW_CREATE KW_DATACONNECTOR ifNotExists? name=identifier dataConnectorType dataConnectorUrl dataConnectorComment? ( KW_WITH KW_PROPERTIES dcprops=dbProperties)?
+    : KW_CREATE KW_DATACONNECTOR ifNotExists? name=identifier dataConnectorType dataConnectorUrl dataConnectorComment? ( KW_WITH KW_DCPROPERTIES dcprops=dcProperties)?
    -> ^(TOK_CREATEDATACONNECTOR $name ifNotExists? dataConnectorType dataConnectorUrl dataConnectorComment? $dcprops?)
     ;
 
@@ -137,6 +137,13 @@ dataConnectorType
     -> ^(TOK_DATACONNECTORTYPE $dcType)
     ;
 
+dcProperties
+@init { gParent.pushMsg("dcproperties", state); }
+@after { gParent.popMsg(state); }
+    :
+      LPAREN dbPropertiesList RPAREN -> ^(TOK_DATACONNECTORPROPERTIES dbPropertiesList)
+    ;
+
 dropDataConnectorStatement
 @init { gParent.pushMsg("drop connector statement", state); }
 @after { gParent.popMsg(state); }
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 9402471..d836b80 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -913,7 +913,7 @@ nonReserved
    KW_ABORT | KW_ADD | KW_ADMIN | KW_AFTER | KW_ANALYZE | KW_ARCHIVE | KW_ASC | KW_BEFORE | KW_BUCKET | KW_BUCKETS
    | KW_CASCADE | KW_CBO | KW_CHANGE | KW_CHECK | KW_CLUSTER | KW_CLUSTERED | KW_CLUSTERSTATUS | KW_COLLECTION | KW_COLUMNS
    | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | KW_CONTINUE | KW_COST | KW_DATA | KW_DAY
-    | KW_DATABASES | KW_DATETIME | KW_DBPROPERTIES | KW_DEFERRED | KW_DEFINED | KW_DELIMITED | KW_DEPENDENCY
+    | KW_DATABASES | KW_DATETIME | KW_DBPROPERTIES | KW_DCPROPERTIES | KW_DEFERRED | KW_DEFINED | KW_DELIMITED | KW_DEPENDENCY
    | KW_DESC | KW_DIRECTORIES | KW_DIRECTORY | KW_DISABLE | KW_DISTRIBUTE | KW_DISTRIBUTED | KW_DOW | KW_ELEM_TYPE
    | KW_ENABLE | KW_ENFORCED | KW_ESCAPED | KW_EXCLUSIVE | KW_EXPLAIN | KW_EXPORT | KW_FIELDS | KW_FILE | KW_FILEFORMAT
    | KW_FIRST | KW_FORMAT | KW_FORMATTED | KW_FUNCTIONS | KW_HOLD_DDLTIME | KW_HOUR | KW_IDXPROPERTIES | KW_RESPECT | KW_IGNORE
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
index ef9d8c8..45df31c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
@@ -49,15 +49,6 @@ public class CreateDatabaseDesc implements DDLDesc, Serializable {
  public CreateDatabaseDesc(String databaseName, String comment, String locationUri, String managedLocationUri,
      boolean ifNotExists, Map<String, String> dbProperties) {
    this(databaseName, comment, locationUri, managedLocationUri, ifNotExists, dbProperties, "NATIVE", null, null);
-/*
-    this.databaseName = databaseName;
-    this.comment = comment;
-    this.locationUri = locationUri;
-    this.managedLocationUri = managedLocationUri;
-    this.ifNotExists = ifNotExists;
-    this.dbProperties = dbProperties;
-
- */
   }
 
  public CreateDatabaseDesc(String databaseName, String comment, String locationUri, String managedLocationUri,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorAnalyzer.java
new file mode 100644
index 0000000..28e5def
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorAnalyzer.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for data connector alteration commands.
+ */
+public abstract class AbstractAlterDataConnectorAnalyzer extends BaseSemanticAnalyzer {
+  public AbstractAlterDataConnectorAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  protected void addAlterDataConnectorDesc(AbstractAlterDataConnectorDesc alterDesc) throws SemanticException {
+    DataConnector connector = getDataConnector(alterDesc.getConnectorName());
+    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_NO_LOCK));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorDesc.java
new file mode 100644
index 0000000..281378f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorDesc.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER CONNECTOR commands.
+ */
+public abstract class AbstractAlterDataConnectorDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String connectorName;
+  private final ReplicationSpec replicationSpec;
+
+  public AbstractAlterDataConnectorDesc(String connectorName, ReplicationSpec replicationSpec) {
+    this.connectorName = connectorName;
+    this.replicationSpec = replicationSpec;
+  }
+
+  @Explain(displayName="name", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getConnectorName() {
+    return connectorName;
+  }
+
+  /**
+   * @return what kind of replication scope this alter is running under.
+   * This can result in an "ALTER IF NEWER THAN" kind of semantics.
+   */
+  public ReplicationSpec getReplicationSpec() {
+    return this.replicationSpec;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
new file mode 100644
index 0000000..a29dd4a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a data connector.
+ */
+public abstract class AbstractAlterDataConnectorOperation<T extends AbstractAlterDataConnectorDesc> extends DDLOperation<T> {
+  public AbstractAlterDataConnectorOperation(DDLOperationContext context, T desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    String dcName = desc.getConnectorName();
+    DataConnector connector = context.getDb().getDataConnector(dcName);
+    if (connector == null) {
+      throw new HiveException(ErrorMsg.DATACONNECTOR_NOT_EXISTS, dcName);
+    }
+
+    Map<String, String> params = connector.getParameters();
+    if ((null != desc.getReplicationSpec()) &&
+        !desc.getReplicationSpec().allowEventReplacementInto(params)) {
+      LOG.debug("DDLTask: Alter Connector {} is skipped as connector is newer than update", dcName);
+      return 0; // no replacement, the existing connector state is newer than our update.
+    }
+
+    doAlteration(connector, params);
+
+    context.getDb().alterDataConnector(connector.getName(), connector);
+    return 0;
+  }
+
+  protected abstract void doAlteration(DataConnector connector, Map<String, String> params) throws HiveException;
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerAnalyzer.java
new file mode 100644
index 0000000..1fb34e1
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerAnalyzer.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.owner;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.authorization.AuthorizationParseUtils;
+
+/**
+ * Analyzer for data connector set owner commands.
+ */
+@DDLType(types = HiveParser.TOK_ALTERDATACONNECTOR_OWNER)
+public class AlterDataConnectorSetOwnerAnalyzer extends AbstractAlterDataConnectorAnalyzer {
+  public AlterDataConnectorSetOwnerAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String connectorName = getUnescapedName((ASTNode) root.getChild(0));
+    PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) root.getChild(1));
+
+    if (principalDesc.getName() == null) {
+      throw new SemanticException("Owner name can't be null in alter connector set owner command");
+    }
+    if (principalDesc.getType() == null) {
+      throw new SemanticException("Owner type can't be null in alter connector set owner command");
+    }
+
+    AlterDataConnectorSetOwnerDesc desc = new AlterDataConnectorSetOwnerDesc(connectorName, principalDesc, null);
+    addAlterDataConnectorDesc(desc);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerDesc.java
new file mode 100644
index 0000000..8d2c0c3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerDesc.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.owner;
+
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorDesc;
+import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER CONNECTOR ... SET OWNER ... commands.
+ */
+@Explain(displayName = "Set DataConnector Owner", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterDataConnectorSetOwnerDesc extends AbstractAlterDataConnectorDesc {
+  private static final long serialVersionUID = 1L;
+
+  private final PrincipalDesc ownerPrincipal;
+
+  public AlterDataConnectorSetOwnerDesc(String connectorName, PrincipalDesc ownerPrincipal, ReplicationSpec replicationSpec) {
+    super(connectorName, replicationSpec);
+    this.ownerPrincipal = ownerPrincipal;
+  }
+
+  @Explain(displayName="owner")
+  public PrincipalDesc getOwnerPrincipal() {
+    return ownerPrincipal;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerOperation.java
new file mode 100644
index 0000000..7f06a4d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerOperation.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.owner;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+
+/**
+ * Operation process of altering a connector's owner.
+ */
+public class AlterDataConnectorSetOwnerOperation extends
+    AbstractAlterDataConnectorOperation<AlterDataConnectorSetOwnerDesc> {
+  public AlterDataConnectorSetOwnerOperation(DDLOperationContext context, AlterDataConnectorSetOwnerDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  protected void doAlteration(DataConnector connector, Map<String, String> params) {
+    connector.setOwnerName(desc.getOwnerPrincipal().getName());
+    connector.setOwnerType(desc.getOwnerPrincipal().getType());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesAnalyzer.java
new file mode 100644
index 0000000..1ddb73f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.properties;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for connector set properties commands.
+ */
+@DDLType(types = HiveParser.TOK_ALTERDATACONNECTOR_PROPERTIES)
+public class AlterDataConnectorSetPropertiesAnalyzer extends AbstractAlterDataConnectorAnalyzer {
+  public AlterDataConnectorSetPropertiesAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String connectorName = unescapeIdentifier(root.getChild(0).getText());
+
+    Map<String, String> dbProps = null;
+    for (int i = 1; i < root.getChildCount(); i++) {
+      ASTNode childNode = (ASTNode) root.getChild(i);
+      switch (childNode.getToken().getType()) {
+      case HiveParser.TOK_DATACONNECTORPROPERTIES:
+        dbProps = getProps((ASTNode) childNode.getChild(0));
+        break;
+      default:
+        throw new SemanticException("Unrecognized token in ALTER CONNECTOR statement");
+      }
+    }
+
+    AlterDataConnectorSetPropertiesDesc desc = new AlterDataConnectorSetPropertiesDesc(connectorName, dbProps, null);
+    addAlterDataConnectorDesc(desc);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesDesc.java
new file mode 100644
index 0000000..07958c2
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesDesc.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.properties;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorDesc;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER CONNECTOR ... SET PROPERTIES ... commands.
+ */
+@Explain(displayName = "Set Connector Properties", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterDataConnectorSetPropertiesDesc extends AbstractAlterDataConnectorDesc {
+  private static final long serialVersionUID = 1L;
+
+  private final Map<String, String> dcProperties;
+
+  public AlterDataConnectorSetPropertiesDesc(String connectorName, Map<String, String> dcProperties,
+      ReplicationSpec replicationSpec) {
+    super(connectorName, replicationSpec);
+    this.dcProperties = dcProperties;
+  }
+
+  @Explain(displayName="properties")
+  public Map<String, String> getConnectorProperties() {
+    return dcProperties;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesOperation.java
new file mode 100644
index 0000000..c9fa032
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesOperation.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.properties;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+
+/**
+ * Operation process of altering a dataconnector's properties.
+ */
+public class AlterDataConnectorSetPropertiesOperation
+    extends AbstractAlterDataConnectorOperation<AlterDataConnectorSetPropertiesDesc> {
+  public AlterDataConnectorSetPropertiesOperation(DDLOperationContext context, AlterDataConnectorSetPropertiesDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  protected void doAlteration(DataConnector connector, Map<String, String> params) {
+    Map<String, String> newParams = desc.getConnectorProperties();
+
+    // if both old and new params are not null, merge them
+    if (params != null && newParams != null) {
+      params.putAll(newParams);
+      connector.setParameters(params);
+    } else {
+      // if one of them is null, replace the old params with the new one
+      connector.setParameters(newParams);
+    }
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlAnalyzer.java
new file mode 100644
index 0000000..217f702
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlAnalyzer.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for connector set url commands.
+ */
+@DDLType(types = HiveParser.TOK_ALTERDATACONNECTOR_URL)
+public class AlterDataConnectorSetUrlAnalyzer extends AbstractAlterDataConnectorAnalyzer {
+  public AlterDataConnectorSetUrlAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String connectorName = getUnescapedName((ASTNode) root.getChild(0));
+    String newURL = unescapeSQLString(root.getChild(1).getText());
+
+    outputs.add(toWriteEntity(newURL));
+
+    AlterDataConnectorSetUrlDesc desc = new AlterDataConnectorSetUrlDesc(connectorName, newURL);
+    addAlterDataConnectorDesc(desc);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlDesc.java
new file mode 100644
index 0000000..a142ea0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlDesc.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER CONNECTOR ... SET URL ... commands.
+ */
+@Explain(displayName = "Set Connector URL", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterDataConnectorSetUrlDesc extends AbstractAlterDataConnectorDesc {
+  private static final long serialVersionUID = 1L;
+
+  private final String url;
+
+  public AlterDataConnectorSetUrlDesc(String connectorName, String location) {
+    super(connectorName, null);
+    this.url = location;
+  }
+
+  @Explain(displayName="url")
+  public String getURL() {
+    return url;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
new file mode 100644
index 0000000..d9caee0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a connector's URL.
+ */
+public class AlterDataConnectorSetUrlOperation extends
+    AbstractAlterDataConnectorOperation<AlterDataConnectorSetUrlDesc> {
+  public AlterDataConnectorSetUrlOperation(DDLOperationContext context, AlterDataConnectorSetUrlDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  protected void doAlteration(DataConnector connector, Map<String, String> params) throws HiveException {
+    try {
+      String newUrl = desc.getURL();
+
+      if (newUrl.equalsIgnoreCase(connector.getUrl())) {
+        throw new HiveException("Old and new URLs for data connector cannot be the same");
+      }
+
+      URI newURI = new URI(newUrl);
+      if (!newURI.isAbsolute() || StringUtils.isBlank(newURI.getScheme())) {
+        throw new HiveException(ErrorMsg.INVALID_PATH, newUrl); // TODO make a new error message for URL
+      }
+
+      if (newUrl.equals(connector.getUrl())) {
+        LOG.info("Alter Connector skipped. No change in url.");
+      } else {
+        connector.setUrl(newUrl);
+      }
+      return;
+    } catch (URISyntaxException e) {
+      throw new HiveException(e);
+    }
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorAnalyzer.java
new file mode 100644
index 0000000..5867108
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorAnalyzer.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.create;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for dataconnector creation commands.
+ */
+@DDLType(types = HiveParser.TOK_CREATEDATACONNECTOR)
+public class CreateDataConnectorAnalyzer extends BaseSemanticAnalyzer {
+  public CreateDataConnectorAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    boolean ifNotExists = false;
+    String comment = null;
+    String url = null;
+    String type = null;
+    Map<String, String> props = null;
+
+    String connectorName = unescapeIdentifier(root.getChild(0).getText());
+    for (int i = 1; i < root.getChildCount(); i++) {
+      ASTNode childNode = (ASTNode) root.getChild(i);
+      switch (childNode.getToken().getType()) {
+      case HiveParser.TOK_IFNOTEXISTS:
+        ifNotExists = true;
+        break;
+      case HiveParser.TOK_DATACONNECTORCOMMENT:
+        comment = unescapeSQLString(childNode.getChild(0).getText());
+        break;
+      case HiveParser.TOK_DATACONNECTORPROPERTIES:
+        props = getProps((ASTNode) childNode.getChild(0));
+        break;
+      case HiveParser.TOK_DATACONNECTORURL:
+        url = unescapeSQLString(childNode.getChild(0).getText());
+        outputs.add(toWriteEntity(url));
+        break;
+      case HiveParser.TOK_DATACONNECTORTYPE:
+        type = unescapeSQLString(childNode.getChild(0).getText());
+        break;
+      default:
+        throw new SemanticException("Unrecognized token in CREATE CONNECTOR statement");
+      }
+    }
+
+    CreateDataConnectorDesc desc = null;
+    DataConnector connector = new DataConnector(connectorName, type, url);
+    if (comment != null)
+      connector.setDescription(comment);
+    if (props != null)
+      connector.setParameters(props);
+
+    desc = new CreateDataConnectorDesc(connectorName, type, url, ifNotExists, comment, props);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_NO_LOCK));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorDesc.java
new file mode 100644
index 0000000..5d07c2c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorDesc.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.create;
+
+import java.io.Serializable;
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for CREATE DATACONNECTOR commands.
+ */
+@Explain(displayName = "Create DataConnector", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateDataConnectorDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String connectorName;
+  private final String type;
+  private final String url;
+  private final String description;
+  private final boolean ifNotExists;
+  private final Map<String, String> dcProperties;
+
+  public CreateDataConnectorDesc(String connectorName, String type, String url, boolean ifNotExists, String description,
+      Map<String, String> dcProperties) {
+    this.connectorName = connectorName;
+    this.type = type;
+    this.url = url;
+    this.ifNotExists = ifNotExists;
+    this.dcProperties = dcProperties;
+    this.description = description;
+  }
+
+  @Explain(displayName="if not exists", displayOnlyOnTrue = true)
+  public boolean getIfNotExists() {
+    return ifNotExists;
+  }
+
+  public Map<String, String> getConnectorProperties() {
+    return dcProperties;
+  }
+
+  @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getName() {
+    return connectorName;
+  }
+
+  @Explain(displayName="description")
+  public String getComment() {
+    return description;
+  }
+
+  @Explain(displayName="url")
+  public String getURL() {
+    return url;
+  }
+
+  @Explain(displayName="connector type")
+  public String getType() {
+    return type;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorOperation.java
new file mode 100644
index 0000000..bca0033
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/create/CreateDataConnectorOperation.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.create;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Operation process of creating a dataconnector.
+ */
+public class CreateDataConnectorOperation extends DDLOperation<CreateDataConnectorDesc> {
+
+  public CreateDataConnectorOperation(DDLOperationContext context, CreateDataConnectorDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    try {
+      URI connectorUri = new URI(desc.getURL());
+      if (!connectorUri.isAbsolute() || StringUtils.isBlank(connectorUri.getScheme())) {
+        throw new HiveException(ErrorMsg.INVALID_PATH, desc.getURL()); // TODO make a new error message for URL
+      }
+
+      DataConnector connector = new DataConnector(desc.getName(), desc.getType(), desc.getURL());
+      if (desc.getComment() != null) connector.setDescription(desc.getComment());
+      connector.setOwnerName(SessionState.getUserFromAuthenticator());
+      connector.setOwnerType(PrincipalType.USER);
+      if (desc.getConnectorProperties() != null) connector.setParameters(desc.getConnectorProperties());
+      try {
+        context.getDb().createDataConnector(connector, desc.getIfNotExists());
+      } catch (AlreadyExistsException ex) {
+        //it would be better if AlreadyExistsException had an errorCode field....
+        throw new HiveException(ex, ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, desc.getName());
+      }
+
+      return 0;
+    } catch (URISyntaxException e) {
+      throw new HiveException(e);
+    }
+
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorAnalyzer.java
new file mode 100644
index 0000000..b7f94d2
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorAnalyzer.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.desc;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for data connector description commands.
+ */
+@DDLType(types = HiveParser.TOK_DESCDATACONNECTOR)
+public class DescDataConnectorAnalyzer extends BaseSemanticAnalyzer {
+  public DescDataConnectorAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() == 0 || root.getChildCount() > 2) {
+      throw new SemanticException("Unexpected Tokens at DESCRIBE CONNECTOR");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String connectorName = stripQuotes(root.getChild(0).getText());
+    boolean isExtended = root.getChildCount() == 2;
+
+    inputs.add(new ReadEntity(getDataConnector(connectorName)));
+
+    DescDataConnectorDesc desc = new DescDataConnectorDesc(ctx.getResFile(), connectorName, isExtended);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(desc.getSchema()));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorDesc.java
new file mode 100644
index 0000000..4a3ee6a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorDesc.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.desc;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for DESC CONNECTOR commands.
+ */
+@Explain(displayName = "Describe Connector", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class DescDataConnectorDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  public static final String DESC_DATACONNECTOR_SCHEMA =
+      "name,type,url,owner_name,owner_type,comment#string:string:string:string:string:string";
+
+  public static final String DESC_DATACONNECTOR_SCHEMA_EXTENDED =
+      "name,type,url,owner_name,owner_type,comment,parameters#" +
+      "string:string:string:string:string:string:string";
+
+  private final String resFile;
+  private final String dcName;
+  private final boolean isExtended;
+
+  public DescDataConnectorDesc(Path resFile, String dcName, boolean isExtended) {
+    this.resFile = resFile.toString();
+    this.dcName = dcName;
+    this.isExtended = isExtended;
+  }
+
+  @Explain(displayName = "extended", displayOnlyOnTrue=true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isExtended() {
+    return isExtended;
+  }
+
+  @Explain(displayName = "connector", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getConnectorName() {
+    return dcName;
+  }
+
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
+  public String getResFile() {
+    return resFile;
+  }
+
+  public String getSchema() {
+    return isExtended ? DESC_DATACONNECTOR_SCHEMA_EXTENDED : DESC_DATACONNECTOR_SCHEMA;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorOperation.java
new file mode 100644
index 0000000..de76842
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/desc/DescDataConnectorOperation.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.desc;
+
+import java.io.DataOutputStream;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of describing a data connector.
+ */
+public class DescDataConnectorOperation extends DDLOperation<DescDataConnectorDesc> {
+  public DescDataConnectorOperation(DDLOperationContext context, DescDataConnectorDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    try (DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) {
+      DataConnector connector = context.getDb().getDataConnector(desc.getConnectorName());
+      if (connector == null) {
+        throw new HiveException(ErrorMsg.DATACONNECTOR_NOT_EXISTS, desc.getConnectorName());
+      }
+
+      SortedMap<String, String> params = null;
+      if (desc.isExtended()) {
+        params = new TreeMap<>(connector.getParameters());
+      }
+
+      context.getFormatter().showDataConnectorDescription(outStream, connector.getName(), connector.getType(),
+          connector.getUrl(), connector.getOwnerName(), connector.getOwnerType(), connector.getDescription(), params);
+    } catch (Exception e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
+    }
+
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorAnalyzer.java
new file mode 100644
index 0000000..37817db
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorAnalyzer.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.drop;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for data connector dropping commands.
+ */
+@DDLType(types = HiveParser.TOK_DROPDATACONNECTOR)
+public class DropDataConnectorAnalyzer extends BaseSemanticAnalyzer {
+  public DropDataConnectorAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String connectorName = unescapeIdentifier(root.getChild(0).getText());
+    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
+
+    DataConnector connector = getDataConnector(connectorName, !ifExists);
+    if (connector == null) {
+      return;
+    }
+
+    inputs.add(new ReadEntity(connector));
+    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_EXCLUSIVE));
+
+    DropDataConnectorDesc desc = new DropDataConnectorDesc(connectorName, ifExists, new ReplicationSpec());
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorDesc.java
new file mode 100644
index 0000000..8e3bc6b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorDesc.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.drop;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for DROP CONNECTOR commands.
+ */
+@Explain(displayName = "Drop Connector", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class DropDataConnectorDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String connectorName;
+  private final boolean ifExists;
+  private final ReplicationSpec replicationSpec;
+
+  public DropDataConnectorDesc(String connectorName, boolean ifExists, ReplicationSpec replicationSpec) {
+    this(connectorName, ifExists, false, replicationSpec);
+  }
+
+  public DropDataConnectorDesc(String connectorName, boolean ifExists, boolean cascade, ReplicationSpec replicationSpec) {
+    this.connectorName = connectorName;
+    this.ifExists = ifExists;
+    this.replicationSpec = replicationSpec;
+  }
+
+  @Explain(displayName = "connector", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getConnectorName() {
+    return connectorName;
+  }
+
+  @Explain(displayName = "if exists")
+  public boolean getIfExists() {
+    return ifExists;
+  }
+
+  public ReplicationSpec getReplicationSpec() {
+    return replicationSpec;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorOperation.java
new file mode 100644
index 0000000..64692f7
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/drop/DropDataConnectorOperation.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.drop;
+
+import org.apache.hadoop.hive.llap.LlapHiveUtils;
+import org.apache.hadoop.hive.llap.ProactiveEviction;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+
+/**
+ * Operation process of dropping a data connector.
+ */
+public class DropDataConnectorOperation extends DDLOperation<DropDataConnectorDesc> {
+  public DropDataConnectorOperation(DDLOperationContext context, DropDataConnectorDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    try {
+      String dcName = desc.getConnectorName();
+      ReplicationSpec replicationSpec = desc.getReplicationSpec();
+      if (replicationSpec.isInReplicationScope()) {
+        DataConnector connector = context.getDb().getDataConnector(dcName);
+        if (connector == null || !replicationSpec.allowEventReplacementInto(connector.getParameters())) {
+          return 0;
+        }
+      }
+
+      context.getDb().dropDataConnector(dcName, desc.getIfExists());
+
+      // TODO is this required for Connectors
+      if (LlapHiveUtils.isLlapMode(context.getConf())) {
+        ProactiveEviction.Request.Builder llapEvictRequestBuilder = ProactiveEviction.Request.Builder.create();
+        llapEvictRequestBuilder.addDb(dcName);
+        ProactiveEviction.evict(context.getConf(), llapEvictRequestBuilder.build());
+      }
+    } catch (NoSuchObjectException ex) {
+      throw new HiveException(ex, ErrorMsg.DATACONNECTOR_NOT_EXISTS, desc.getConnectorName());
+    }
+
+    return 0;
+  }
+}
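
A matching sketch for the drop path, again over JDBC with an assumed endpoint and connector name; IF EXISTS corresponds to the ifExists flag carried by DropDataConnectorDesc.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class DropConnectorSketch {
      public static void main(String[] args) throws Exception {
        // Assumed HiveServer2 endpoint; the connector name is illustrative only.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
          // With IF EXISTS the analyzer returns early when the connector is absent,
          // instead of raising DATACONNECTOR_NOT_EXISTS.
          stmt.execute("DROP CONNECTOR IF EXISTS mysql_local");
        }
      }
    }
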
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsAnalyzer.java
new file mode 100644
index 0000000..20ec39e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsAnalyzer.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.show;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for show connectors commands.
+ */
+@DDLType(types = HiveParser.TOK_SHOWDATACONNECTORS)
+public class ShowDataConnectorsAnalyzer extends BaseSemanticAnalyzer {
+  public ShowDataConnectorsAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() > 1) {
+      throw new SemanticException("Unexpected Tokens at SHOW CONNECTORS");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String connectorPattern = root.getChildCount() == 1 ? unescapeSQLString(root.getChild(0).getText()) : null;
+    ShowDataConnectorsDesc desc = new ShowDataConnectorsDesc(ctx.getResFile(), connectorPattern);
+
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(ShowDataConnectorsDesc.SHOW_DATACONNECTORS_SCHEMA));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsDesc.java
new file mode 100644
index 0000000..c72799f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsDesc.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.show;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for SHOW CONNECTORS commands.
+ */
+@Explain(displayName = "Show Connectors", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class ShowDataConnectorsDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  public static final String SHOW_DATACONNECTORS_SCHEMA = "connector_name#string";
+
+  private final String resFile;
+  private final String pattern;
+
+  public ShowDataConnectorsDesc(Path resFile, String pattern) {
+    this.resFile = resFile.toString();
+    this.pattern = pattern;
+  }
+
+  @Explain(displayName = "pattern")
+  public String getPattern() {
+    return pattern;
+  }
+
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
+  public String getResFile() {
+    return resFile;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsOperation.java
new file mode 100644
index 0000000..8ecc9eb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsOperation.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.show;
+
+import java.io.DataOutputStream;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFLike;
+import org.apache.hadoop.io.IOUtils;
+
+/**
+ * Operation process of showing data connectors.
+ */
+public class ShowDataConnectorsOperation extends DDLOperation<ShowDataConnectorsDesc> {
+  public ShowDataConnectorsOperation(DDLOperationContext context, ShowDataConnectorsDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    List<String> connectors = context.getDb().getAllDataConnectors();
+    if (desc.getPattern() != null) {
+      LOG.debug("pattern: {}", desc.getPattern());
+      Pattern pattern = Pattern.compile(UDFLike.likePatternToRegExp(desc.getPattern()), Pattern.CASE_INSENSITIVE);
+      connectors = connectors.stream().filter(name -> pattern.matcher(name).matches()).collect(Collectors.toList());
+    }
+
+    LOG.info("Found {} connector(s) matching the SHOW CONNECTORS statement.", connectors.size());
+
+    // write the results in the file
+    DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context);
+    try {
+      context.getFormatter().showDataConnectors(outStream, connectors);
+    } catch (Exception e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show connectors");
+    } finally {
+      IOUtils.closeStream(outStream);
+    }
+
+    return 0;
+  }
+}
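
Finally, a sketch for the show path; the endpoint is again an assumption. When a pattern is supplied, the operation above filters connector names case-insensitively via UDFLike.likePatternToRegExp; the plain statement below returns all connectors.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class ShowConnectorsSketch {
      public static void main(String[] args) throws Exception {
        // Assumed HiveServer2 endpoint; requires the Hive JDBC driver on the classpath.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SHOW CONNECTORS")) {
          // One row per connector; the single column follows SHOW_DATACONNECTORS_SCHEMA (connector_name#string).
          while (rs.next()) {
            System.out.println(rs.getString(1));
          }
        }
      }
    }
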
