Repository: hive
Updated Branches:
  refs/heads/master 4536dcd68 -> f13ee0897


http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/ICLIService.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/ICLIService.java b/service/src/java/org/apache/hive/service/cli/ICLIService.java
index 0a54bdd..e4aef96 100644
--- a/service/src/java/org/apache/hive/service/cli/ICLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/ICLIService.java
@@ -98,5 +98,13 @@ public interface ICLIService {
   void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
       String tokenStr) throws HiveSQLException;
 
+  OperationHandle getPrimaryKeys(SessionHandle sessionHandle, String catalog,
+    String schema, String table) throws HiveSQLException;
+
+  OperationHandle getCrossReference(SessionHandle sessionHandle,
+    String primaryCatalog, String primarySchema, String primaryTable,
+    String foreignCatalog, String foreignSchema, String foreignTable)
+    throws HiveSQLException;
+
 
 }
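
The two additions surface primary key and foreign key metadata through the same ICLIService entry point used by the other metadata calls. A minimal sketch of how a caller might use them, assuming an ICLIService implementation ("client") and an already-opened SessionHandle ("sessionHandle") obtained elsewhere; the catalog argument is passed as null since Hive does not use catalogs, and the schema/table names are hypothetical:

// Illustrative sketch only; "client" and "sessionHandle" are assumptions, not part of this patch.
OperationHandle pkHandle = client.getPrimaryKeys(sessionHandle, null, "default", "orders");

OperationHandle fkHandle = client.getCrossReference(sessionHandle,
    null, "default", "customers",   // parent (primary key) side
    null, "default", "orders");     // foreign key side

// Results are then fetched like any other metadata operation, e.g.
// RowSet rows = client.fetchResults(pkHandle);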

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
new file mode 100644
index 0000000..a6439b6
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
+import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.RowSetFactory;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hadoop.hive.serde2.thrift.Type;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetCrossReferenceOperation.
+ *
+ */
+public class GetCrossReferenceOperation extends MetadataOperation {
+  /**
+  PKTABLE_CAT String => parent key table catalog (may be null)
+  PKTABLE_SCHEM String => parent key table schema (may be null)
+  PKTABLE_NAME String => parent key table name
+  PKCOLUMN_NAME String => parent key column name
+  FKTABLE_CAT String => foreign key table catalog (may be null) being exported
+  FKTABLE_SCHEM String => foreign key table schema (may be null) being exported
+  FKTABLE_NAME String => foreign key table name being exported
+  FKCOLUMN_NAME String => foreign key column name being exported
+  KEY_SEQ short => sequence number within foreign key (a value of 1 represents the first column of the foreign key, a value of 2 would represent the second column within the foreign key).
+  UPDATE_RULE short => What happens to a foreign key when the parent key is updated:
+  importedKeyNoAction - do not allow update of parent key if it has been imported
+  importedKeyCascade - change imported key to agree with parent key update
+  importedKeySetNull - change imported key to NULL if its parent key has been updated
+  importedKeySetDefault - change imported key to default values if its parent key has been updated
+  importedKeyRestrict - same as importedKeyNoAction (for ODBC 2.x compatibility)
+  DELETE_RULE short => What happens to the foreign key when the parent key is deleted:
+  importedKeyNoAction - do not allow delete of parent key if it has been imported
+  importedKeyCascade - delete rows that import a deleted key
+  importedKeySetNull - change imported key to NULL if its primary key has been deleted
+  importedKeyRestrict - same as importedKeyNoAction (for ODBC 2.x compatibility)
+  importedKeySetDefault - change imported key to default if its parent key has been deleted
+  FK_NAME String => foreign key name (may be null)
+  PK_NAME String => parent key name (may be null)
+  DEFERRABILITY short => can the evaluation of foreign key constraints be deferred until commit
+  importedKeyInitiallyDeferred - see SQL92 for definition
+  importedKeyInitiallyImmediate - see SQL92 for definition
+  importedKeyNotDeferrable - see SQL92 for definition
+  */
+  private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+  .addPrimitiveColumn("PKTABLE_CAT", Type.STRING_TYPE,
+      "Parent key table catalog (may be null)")
+  .addPrimitiveColumn("PKTABLE_SCHEM", Type.STRING_TYPE,
+      "Parent key table schema (may be null)")
+  .addPrimitiveColumn("PKTABLE_NAME", Type.STRING_TYPE,
+      "Parent Key table name")
+  .addPrimitiveColumn("PKCOLUMN_NAME", Type.STRING_TYPE,
+      "Parent Key column name")
+  .addPrimitiveColumn("FKTABLE_CAT", Type.STRING_TYPE,
+      "Foreign key table catalog (may be null)")
+  .addPrimitiveColumn("FKTABLE_SCHEM", Type.STRING_TYPE,
+      "Foreign key table schema (may be null)")
+  .addPrimitiveColumn("FKTABLE_NAME", Type.STRING_TYPE,
+      "Foreign Key table name")
+  .addPrimitiveColumn("FKCOLUMN_NAME", Type.STRING_TYPE,
+      "Foreign Key column name")
+  .addPrimitiveColumn("KEQ_SEQ", Type.INT_TYPE,
+      "Sequence number within primary key")
+  .addPrimitiveColumn("UPDATE_RULE", Type.INT_TYPE,
+      "What happens to foreign key when parent key is updated")
+  .addPrimitiveColumn("DELETE_RULE", Type.INT_TYPE,
+      "What happens to foreign key when parent key is deleted")
+  .addPrimitiveColumn("FK_NAME", Type.STRING_TYPE,
+      "Foreign key name (may be null)")
+  .addPrimitiveColumn("PK_NAME", Type.STRING_TYPE,
+      "Primary key name (may be null)")
+  .addPrimitiveColumn("DEFERRABILITY", Type.INT_TYPE,
+      "Can the evaluation of foreign key constraints be deferred until 
commit");
+  private final String parentCatalogName;
+  private final String parentSchemaName;
+  private final String parentTableName;
+  private final String foreignCatalogName;
+  private final String foreignSchemaName;
+  private final String foreignTableName;
+  private final RowSet rowSet;
+
+  public GetCrossReferenceOperation(HiveSession parentSession,
+      String parentCatalogName, String parentSchemaName, String parentTableName,
+      String foreignCatalog, String foreignSchema, String foreignTable) {
+    super(parentSession, OperationType.GET_FUNCTIONS);
+    this.parentCatalogName = parentCatalogName;
+    this.parentSchemaName = parentSchemaName;
+    this.parentTableName = parentTableName;
+    this.foreignCatalogName = foreignCatalog;
+    this.foreignSchemaName = foreignSchema;
+    this.foreignTableName = foreignTable;
+    this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+  }
+
+  @Override
+  public void runInternal() throws HiveSQLException {
+    setState(OperationState.RUNNING);
+    try {
+      IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
+      ForeignKeysRequest fkReq = new ForeignKeysRequest(parentSchemaName,
+          parentTableName, foreignSchemaName, foreignTableName);
+      List<SQLForeignKey> fks = metastoreClient.getForeignKeys(fkReq);
+      if (fks == null) {
+        return;
+      }
+      for (SQLForeignKey fk : fks) {
+        rowSet.addRow(new Object[] {parentCatalogName,
+            fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(),
+            foreignCatalogName,
+            fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(),
+            fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(), fk.getFk_name(),
+            fk.getPk_name(), 0});
+      }
+      setState(OperationState.FINISHED);
+    } catch (Exception e) {
+      setState(OperationState.ERROR);
+      throw new HiveSQLException(e);
+    }
+  }
+
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+   */
+  @Override
+  public TableSchema getResultSetSchema() throws HiveSQLException {
+    assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.FINISHED)));
+    return RESULT_SET_SCHEMA;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+   */
+  @Override
+  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+    assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.FINISHED)));
+    validateDefaultFetchOrientation(orientation);
+    if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
+      rowSet.setStartOffset(0);
+    }
+    return rowSet.extractSubset((int)maxRows);
+  }
+}
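
On the client side this operation is what a JDBC DatabaseMetaData.getCrossReference call would be served by, assuming the Hive JDBC driver forwards that call to the new Thrift API (the driver change is not part of this diff). A hedged sketch, with a made-up connection URL and table names:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class CrossReferenceExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical HiveServer2 endpoint and tables.
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default")) {
      DatabaseMetaData meta = conn.getMetaData();
      // Parent (primary key) table first, then the foreign key table.
      try (ResultSet rs = meta.getCrossReference(null, "default", "customers",
                                                 null, "default", "orders")) {
        while (rs.next()) {
          System.out.println(rs.getString("PKTABLE_NAME") + "." + rs.getString("PKCOLUMN_NAME")
              + " <- " + rs.getString("FKTABLE_NAME") + "." + rs.getString("FKCOLUMN_NAME"));
        }
      }
    }
  }
}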

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
new file mode 100644
index 0000000..5ec510d
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.cli.operation;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.lang.NumberUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
+import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hive.service.cli.FetchOrientation;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.OperationState;
+import org.apache.hive.service.cli.OperationType;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.RowSetFactory;
+import org.apache.hive.service.cli.TableSchema;
+import org.apache.hadoop.hive.serde2.thrift.Type;
+import org.apache.hive.service.cli.session.HiveSession;
+
+/**
+ * GetPrimaryKeysOperation.
+ *
+ */
+public class GetPrimaryKeysOperation extends MetadataOperation {
+/**
+TABLE_CAT String => table catalog (may be null)
+TABLE_SCHEM String => table schema (may be null)
+TABLE_NAME String => table name
+COLUMN_NAME String => column name
+KEY_SEQ short => sequence number within primary key (a value of 1 represents the first column of the primary key, a value of 2 would represent the second column within the primary key).
+PK_NAME String => primary key name (may be null)
+ */
+  private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
+  .addPrimitiveColumn("TABLE_CAT", Type.STRING_TYPE,
+      "Table catalog (may be null)")
+  .addPrimitiveColumn("TABLE_SCHEM", Type.STRING_TYPE,
+      "Table schema (may be null)")
+  .addPrimitiveColumn("TABLE_NAME", Type.STRING_TYPE,
+      "Table name")
+  .addPrimitiveColumn("COLUMN_NAME", Type.STRING_TYPE,
+      "Column name")
+  .addPrimitiveColumn("KEQ_SEQ", Type.INT_TYPE,
+      "Sequence number within primary key")
+  .addPrimitiveColumn("PK_NAME", Type.STRING_TYPE,
+      "Primary key name (may be null)");
+
+  private final String catalogName;
+  private final String schemaName;
+  private final String tableName;
+
+  private final RowSet rowSet;
+
+  public GetPrimaryKeysOperation(HiveSession parentSession,
+      String catalogName, String schemaName, String tableName) {
+    super(parentSession, OperationType.GET_FUNCTIONS);
+    this.catalogName = catalogName;
+    this.schemaName = schemaName;
+    this.tableName = tableName;
+    this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+  }
+
+  @Override
+  public void runInternal() throws HiveSQLException {
+    setState(OperationState.RUNNING);
+    try {
+      IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
+      PrimaryKeysRequest sqlReq = new PrimaryKeysRequest(schemaName, tableName);
+      List<SQLPrimaryKey> pks = metastoreClient.getPrimaryKeys(sqlReq);
+      if (pks == null) {
+        return;
+      }
+      for (SQLPrimaryKey pk : pks) {
+        rowSet.addRow(new Object[] {catalogName, pk.getTable_db(),
+          pk.getTable_name(), pk.getColumn_name(), pk.getKey_seq(), pk.getPk_name()});
+      }
+      setState(OperationState.FINISHED);
+    } catch (Exception e) {
+      setState(OperationState.ERROR);
+      throw new HiveSQLException(e);
+    }
+  }
+
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.Operation#getResultSetSchema()
+   */
+  @Override
+  public TableSchema getResultSetSchema() throws HiveSQLException {
+    assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.FINISHED)));
+    return RESULT_SET_SCHEMA;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hive.service.cli.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
+   */
+  @Override
+  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+    assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.FINISHED)));
+    validateDefaultFetchOrientation(orientation);
+    if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
+      rowSet.setStartOffset(0);
+    }
+    return rowSet.extractSubset((int)maxRows);
+  }
+}
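
The primary key operation plays the same role for DatabaseMetaData.getPrimaryKeys. A short client-side sketch, assuming "conn" is an open HiveServer2 JDBC connection and the driver routes the call through this operation:

// Columns follow RESULT_SET_SCHEMA above: TABLE_CAT, TABLE_SCHEM, TABLE_NAME,
// COLUMN_NAME, KEY_SEQ, PK_NAME. Schema and table names are hypothetical.
DatabaseMetaData meta = conn.getMetaData();
try (ResultSet rs = meta.getPrimaryKeys(null, "default", "orders")) {
  while (rs.next()) {
    System.out.println(rs.getString("COLUMN_NAME") + " seq=" + rs.getInt("KEY_SEQ"));
  }
}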

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
index eb3ab21..52e4b4d 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -169,6 +169,25 @@ public class OperationManager extends AbstractService {
     return operation;
   }
 
+  public GetPrimaryKeysOperation newGetPrimaryKeysOperation(HiveSession parentSession,
+      String catalogName, String schemaName, String tableName) {
+    GetPrimaryKeysOperation operation = new GetPrimaryKeysOperation(parentSession,
+        catalogName, schemaName, tableName);
+    addOperation(operation);
+    return operation;
+  }
+
+  public GetCrossReferenceOperation newGetCrossReferenceOperation(
+      HiveSession session, String primaryCatalog, String primarySchema,
+      String primaryTable, String foreignCatalog, String foreignSchema,
+      String foreignTable) {
+    GetCrossReferenceOperation operation = new GetCrossReferenceOperation(session,
+        primaryCatalog, primarySchema, primaryTable, foreignCatalog, foreignSchema,
+        foreignTable);
+    addOperation(operation);
+    return operation;
+  }
+
   public Operation getOperation(OperationHandle operationHandle) throws HiveSQLException {
     Operation operation = getOperationInternal(operationHandle);
     if (operation == null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/session/HiveSession.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSession.java b/service/src/java/org/apache/hive/service/cli/session/HiveSession.java
index 4f4e92d..9ea643b 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSession.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSession.java
@@ -136,6 +136,34 @@ public interface HiveSession extends HiveSessionBase {
       String functionName) throws HiveSQLException;
 
   /**
+   * getPrimaryKeys operation handler
+   * @param catalog
+   * @param schema
+   * @param table
+   * @return operation handle
+   * @throws HiveSQLException
+   */
+  OperationHandle getPrimaryKeys(String catalog, String schema, String table)
+    throws HiveSQLException;
+
+
+  /**
+   * getCrossReference operation handler
+   * @param primaryCatalog
+   * @param primarySchema
+   * @param primaryTable
+   * @param foreignCatalog
+   * @param foreignSchema
+   * @param foreignTable
+   * @return operation handle
+   * @throws HiveSQLException
+   */
+  OperationHandle getCrossReference(String primaryCatalog,
+      String primarySchema, String primaryTable, String foreignCatalog,
+      String foreignSchema, String foreignTable)
+    throws HiveSQLException;
+
+  /**
    * close the session
    * @throws HiveSQLException
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index ce50967..c24f162 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -63,7 +63,9 @@ import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.operation.ExecuteStatementOperation;
 import org.apache.hive.service.cli.operation.GetCatalogsOperation;
 import org.apache.hive.service.cli.operation.GetColumnsOperation;
+import org.apache.hive.service.cli.operation.GetCrossReferenceOperation;
 import org.apache.hive.service.cli.operation.GetFunctionsOperation;
+import org.apache.hive.service.cli.operation.GetPrimaryKeysOperation;
 import org.apache.hive.service.cli.operation.GetSchemasOperation;
 import org.apache.hive.service.cli.operation.GetTableTypesOperation;
 import org.apache.hive.service.cli.operation.GetTypeInfoOperation;
@@ -838,4 +840,49 @@ public class HiveSessionImpl implements HiveSession {
   private String getUserFromToken(HiveAuthFactory authFactory, String tokenStr) throws HiveSQLException {
     return authFactory.getUserFromToken(tokenStr);
   }
+
+  @Override
+  public OperationHandle getPrimaryKeys(String catalog, String schema,
+    String table) throws HiveSQLException {
+    acquire(true);
+
+    OperationManager operationManager = getOperationManager();
+    GetPrimaryKeysOperation operation = operationManager
+        .newGetPrimaryKeysOperation(getSession(), catalog, schema, table);
+    OperationHandle opHandle = operation.getHandle();
+    try {
+      operation.run();
+      addOpHandle(opHandle);
+      return opHandle;
+    } catch (HiveSQLException e) {
+      operationManager.closeOperation(opHandle);
+      throw e;
+    } finally {
+      release(true);
+    }
+  }
+
+  @Override
+  public OperationHandle getCrossReference(String primaryCatalog,
+    String primarySchema, String primaryTable, String foreignCatalog,
+    String foreignSchema, String foreignTable) throws HiveSQLException {
+    acquire(true);
+
+    OperationManager operationManager = getOperationManager();
+    GetCrossReferenceOperation operation = operationManager
+      .newGetCrossReferenceOperation(getSession(), primaryCatalog,
+         primarySchema, primaryTable, foreignCatalog,
+         foreignSchema, foreignTable);
+    OperationHandle opHandle = operation.getHandle();
+    try {
+      operation.run();
+      addOpHandle(opHandle);
+      return opHandle;
+    } catch (HiveSQLException e) {
+      operationManager.closeOperation(opHandle);
+      throw e;
+    } finally {
+      release(true);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java b/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java
index 14191e5..b2e0e9e 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/RetryingThriftCLIServiceClient.java
@@ -207,6 +207,22 @@ public class RetryingThriftCLIServiceClient implements InvocationHandler {
     public void closeTransport() {
       tTransport.close();
     }
+
+    @Override
+    public OperationHandle getPrimaryKeys(SessionHandle sessionHandle,
+      String catalog, String schema, String table)
+      throws HiveSQLException {
+      return cliService.getPrimaryKeys(sessionHandle, catalog, schema, table);
+    }
+
+    @Override
+    public OperationHandle getCrossReference(SessionHandle sessionHandle,
+      String primaryCatalog, String primarySchema, String primaryTable,
+      String foreignCatalog, String foreignSchema, String foreignTable)
+      throws HiveSQLException {
+      return cliService.getCrossReference(sessionHandle, primaryCatalog, primarySchema,
+        primaryTable, foreignCatalog, foreignSchema, foreignTable);
+    }
   }
 
   protected RetryingThriftCLIServiceClient(HiveConf conf) {

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 74263e3..6ede1d7 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -66,6 +66,8 @@ import org.apache.hive.service.rpc.thrift.TGetCatalogsReq;
 import org.apache.hive.service.rpc.thrift.TGetCatalogsResp;
 import org.apache.hive.service.rpc.thrift.TGetColumnsReq;
 import org.apache.hive.service.rpc.thrift.TGetColumnsResp;
+import org.apache.hive.service.rpc.thrift.TGetCrossReferenceReq;
+import org.apache.hive.service.rpc.thrift.TGetCrossReferenceResp;
 import org.apache.hive.service.rpc.thrift.TGetDelegationTokenReq;
 import org.apache.hive.service.rpc.thrift.TGetDelegationTokenResp;
 import org.apache.hive.service.rpc.thrift.TGetFunctionsReq;
@@ -74,6 +76,8 @@ import org.apache.hive.service.rpc.thrift.TGetInfoReq;
 import org.apache.hive.service.rpc.thrift.TGetInfoResp;
 import org.apache.hive.service.rpc.thrift.TGetOperationStatusReq;
 import org.apache.hive.service.rpc.thrift.TGetOperationStatusResp;
+import org.apache.hive.service.rpc.thrift.TGetPrimaryKeysReq;
+import org.apache.hive.service.rpc.thrift.TGetPrimaryKeysResp;
 import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataReq;
 import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataResp;
 import org.apache.hive.service.rpc.thrift.TGetSchemasReq;
@@ -697,6 +701,41 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
   }
 
   @Override
+  public TGetPrimaryKeysResp GetPrimaryKeys(TGetPrimaryKeysReq req)
+      throws TException {
+    TGetPrimaryKeysResp resp = new TGetPrimaryKeysResp();
+    try {
+      OperationHandle opHandle = cliService.getPrimaryKeys(
+          new SessionHandle(req.getSessionHandle()), req.getCatalogName(),
+          req.getSchemaName(), req.getTableName());
+      resp.setOperationHandle(opHandle.toTOperationHandle());
+      resp.setStatus(OK_STATUS);
+    } catch (Exception e) {
+      LOG.warn("Error getting primary keys: ", e);
+      resp.setStatus(HiveSQLException.toTStatus(e));
+    }
+    return resp;
+  }
+
+  @Override
+  public TGetCrossReferenceResp GetCrossReference(TGetCrossReferenceReq req)
+      throws TException {
+    TGetCrossReferenceResp resp = new TGetCrossReferenceResp();
+    try {
+      OperationHandle opHandle = cliService.getCrossReference(
+          new SessionHandle(req.getSessionHandle()), req.getParentCatalogName(),
+          req.getParentSchemaName(), req.getParentTableName(),
+          req.getForeignCatalogName(), req.getForeignSchemaName(), req.getForeignTableName());
+      resp.setOperationHandle(opHandle.toTOperationHandle());
+      resp.setStatus(OK_STATUS);
+    } catch (Exception e) {
+      LOG.warn("Error getting cross reference: ", e);
+      resp.setStatus(HiveSQLException.toTStatus(e));
+    }
+    return resp;
+  }
+  
+  @Override
   public abstract void run();
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/f13ee089/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
index ccce6dc..098aea6 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIServiceClient.java
@@ -52,6 +52,8 @@ import org.apache.hive.service.rpc.thrift.TGetCatalogsReq;
 import org.apache.hive.service.rpc.thrift.TGetCatalogsResp;
 import org.apache.hive.service.rpc.thrift.TGetColumnsReq;
 import org.apache.hive.service.rpc.thrift.TGetColumnsResp;
+import org.apache.hive.service.rpc.thrift.TGetCrossReferenceReq;
+import org.apache.hive.service.rpc.thrift.TGetCrossReferenceResp;
 import org.apache.hive.service.rpc.thrift.TGetDelegationTokenReq;
 import org.apache.hive.service.rpc.thrift.TGetDelegationTokenResp;
 import org.apache.hive.service.rpc.thrift.TGetFunctionsReq;
@@ -60,6 +62,8 @@ import org.apache.hive.service.rpc.thrift.TGetInfoReq;
 import org.apache.hive.service.rpc.thrift.TGetInfoResp;
 import org.apache.hive.service.rpc.thrift.TGetOperationStatusReq;
 import org.apache.hive.service.rpc.thrift.TGetOperationStatusResp;
+import org.apache.hive.service.rpc.thrift.TGetPrimaryKeysReq;
+import org.apache.hive.service.rpc.thrift.TGetPrimaryKeysResp;
 import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataReq;
 import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataResp;
 import org.apache.hive.service.rpc.thrift.TGetSchemasReq;
@@ -492,4 +496,47 @@ public class ThriftCLIServiceClient extends CLIServiceClient {
       throw new HiveSQLException(e);
     }
   }
+
+  @Override
+  public OperationHandle getPrimaryKeys(SessionHandle sessionHandle,
+    String catalog, String schema, String table) throws HiveSQLException {
+    try {
+      TGetPrimaryKeysReq req = new TGetPrimaryKeysReq(sessionHandle.toTSessionHandle());
+      req.setCatalogName(catalog);
+      req.setSchemaName(schema);
+      req.setTableName(table);
+      TGetPrimaryKeysResp resp = cliService.GetPrimaryKeys(req);
+      checkStatus(resp.getStatus());
+      TProtocolVersion protocol = sessionHandle.getProtocolVersion();
+      return new OperationHandle(resp.getOperationHandle(), protocol);
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
+
+  @Override
+  public OperationHandle getCrossReference(SessionHandle sessionHandle,
+      String primaryCatalog, String primarySchema, String primaryTable,
+      String foreignCatalog, String foreignSchema, String foreignTable)
+      throws HiveSQLException {
+    try {
+      TGetCrossReferenceReq req = new TGetCrossReferenceReq(sessionHandle.toTSessionHandle());
+      req.setParentCatalogName(primaryCatalog);
+      req.setParentSchemaName(primarySchema);
+      req.setParentTableName(primaryTable);
+      req.setForeignCatalogName(foreignCatalog);
+      req.setForeignSchemaName(foreignSchema);
+      req.setForeignTableName(foreignTable);
+      TGetCrossReferenceResp resp = cliService.GetCrossReference(req);
+      checkStatus(resp.getStatus());
+      TProtocolVersion protocol = sessionHandle.getProtocolVersion();
+      return new OperationHandle(resp.getOperationHandle(), protocol);
+    } catch (HiveSQLException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new HiveSQLException(e);
+    }
+  }
 }
