Repository: ambari
Updated Branches:
  refs/heads/branch-2.5 457b7635c -> 0de07e093


Revert "AMBARI-20113 : hive20 view : fixed : Not able to compute the table 
statistics for partitioned table (nitirajrathore)"

This reverts commit 457b7635c6f0242ae108626c15168dd8196c1a17.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0de07e09
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0de07e09
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0de07e09

Branch: refs/heads/branch-2.5
Commit: 0de07e09361b79db25090546a46516affee5ada2
Parents: 457b763
Author: Nitiraj Singh Rathore <nitiraj.rath...@gmail.com>
Authored: Wed Mar 1 23:26:46 2017 +0530
Committer: Nitiraj Singh Rathore <nitiraj.rath...@gmail.com>
Committed: Wed Mar 1 23:26:46 2017 +0530

----------------------------------------------------------------------
 .../generators/AnalyzeTableQueryGenerator.java  |  49 +----
 .../view/hive20/resources/browser/DDLProxy.java |   6 +-
 .../hive20/resources/browser/DDLService.java    |   4 +-
 .../AnalyzeTableQueryGeneratorSpecTest.groovy   | 193 -------------------
 4 files changed, 11 insertions(+), 241 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0de07e09/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java
index dbe9d43..1d68407 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGenerator.java
@@ -18,58 +18,23 @@
 
 package org.apache.ambari.view.hive20.internal.query.generators;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
 import com.google.common.base.Optional;
-import com.google.common.collect.FluentIterable;
 import org.apache.ambari.view.hive20.exceptions.ServiceException;
-import org.apache.ambari.view.hive20.internal.dto.ColumnInfo;
-import org.apache.ambari.view.hive20.internal.dto.TableMeta;
-
-import javax.annotation.Nullable;
-
-import static org.apache.ambari.view.hive20.internal.query.generators.QueryGenerationUtils.isNullOrEmpty;
 
 public class AnalyzeTableQueryGenerator implements QueryGenerator {
-  private TableMeta tableMeta;
+  private final String databaseName;
+  private final String tableName;
   private final Boolean shouldAnalyzeColumns;
 
-  public AnalyzeTableQueryGenerator(TableMeta tableMeta, Boolean shouldAnalyzeColumns) {
-    this.tableMeta = tableMeta;
+  public AnalyzeTableQueryGenerator(String databaseName, String tableName, Boolean shouldAnalyzeColumns) {
+    this.databaseName = databaseName;
+    this.tableName = tableName;
     this.shouldAnalyzeColumns = shouldAnalyzeColumns;
   }
 
   @Override
   public Optional<String> getQuery() throws ServiceException {
-    StringBuilder query = new StringBuilder("ANALYZE TABLE " );
-    query.append("`").append(tableMeta.getDatabase()).append("`").append(".").append("`").append(tableMeta.getTable()).append("`");
-
-    if( null != tableMeta.getPartitionInfo() && !isNullOrEmpty(tableMeta.getPartitionInfo().getColumns())){
-      query.append(" PARTITION (")
-           .append(Joiner.on(",")
-              .join(FluentIterable.from(tableMeta.getPartitionInfo().getColumns())
-                  .transform(
-                  new Function<ColumnInfo, Object>() {
-                    @Nullable
-                    @Override
-                    public Object apply(@Nullable ColumnInfo columnInfo) {
-                      return columnInfo.getName();
-                    }
-                  })
-              )
-           )
-           .append(")");
-    }
-
-
-    query.append(" COMPUTE STATISTICS ");
-
-    if(shouldAnalyzeColumns){
-      query.append(" FOR COLUMNS ");
-    }
-
-    query.append(";");
-
-    return Optional.of(query.toString());
+    return Optional.of("ANALYZE TABLE " + "`" + databaseName + "`.`" + 
tableName + "`" + " COMPUTE STATISTICS " +
+      (shouldAnalyzeColumns? " FOR COLUMNS ": "") + ";");
   }
 }
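For reference, a minimal sketch (not part of this commit) of what the restored generator emits; the database and table names below are hypothetical:

    // Hypothetical caller of the post-revert constructor (databaseName, tableName, shouldAnalyzeColumns).
    AnalyzeTableQueryGenerator generator = new AnalyzeTableQueryGenerator("d1", "t2", true);
    Optional<String> query = generator.getQuery();
    // query.get() returns:
    //   ANALYZE TABLE `d1`.`t2` COMPUTE STATISTICS  FOR COLUMNS ;
    // After this revert the generator no longer appends a PARTITION (...) clause for
    // partitioned tables, which is what the reverted AMBARI-20113 change had added.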

http://git-wip-us.apache.org/repos/asf/ambari/blob/0de07e09/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
index 0c93ba3..e433dc4 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLProxy.java
@@ -332,10 +332,8 @@ public class DDLProxy {
     }
   }
 
-  public Job analyzeTable(String databaseName, String tableName, Boolean shouldAnalyzeColumns, JobResourceManager resourceManager, ConnectionConfig hiveConnectionConfig) throws ServiceException {
-    TableMeta tableMeta = this.getTableProperties(context, hiveConnectionConfig, databaseName, tableName);
-
-    AnalyzeTableQueryGenerator queryGenerator = new AnalyzeTableQueryGenerator(tableMeta, shouldAnalyzeColumns);
+  public Job analyzeTable(String databaseName, String tableName, Boolean shouldAnalyzeColumns, JobResourceManager resourceManager) throws ServiceException {
+    AnalyzeTableQueryGenerator queryGenerator = new AnalyzeTableQueryGenerator(databaseName, tableName, shouldAnalyzeColumns);
     Optional<String> analyzeTable = queryGenerator.getQuery();
     String jobTitle = "Analyze table " + databaseName + "." + tableName;
     if(analyzeTable.isPresent()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/0de07e09/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
index f5a4781..89b9d84 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/DDLService.java
@@ -37,6 +37,7 @@ import org.slf4j.LoggerFactory;
 import javax.inject.Inject;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
+import javax.ws.rs.FormParam;
 import javax.ws.rs.GET;
 import javax.ws.rs.POST;
 import javax.ws.rs.PUT;
@@ -183,8 +184,7 @@ public class DDLService extends BaseService {
       shouldAnalyzeColumns = Boolean.valueOf(analyzeColumns.trim());
     }
     try {
-      ConnectionConfig hiveConnectionConfig = getHiveConnectionConfig();
-      Job job = proxy.analyzeTable(databaseName, tableName, shouldAnalyzeColumns, getResourceManager(), hiveConnectionConfig);
+      Job job = proxy.analyzeTable(databaseName, tableName, shouldAnalyzeColumns, getResourceManager());
       JSONObject response = new JSONObject();
       response.put("job", job);
       return Response.status(Response.Status.ACCEPTED).entity(response).build();

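A short sketch of the post-revert call path across DDLService and DDLProxy, assuming the surrounding request-handling code that this diff only shows partially:

    // DDLService handler (sketch): the Hive ConnectionConfig lookup is dropped because
    // DDLProxy.analyzeTable no longer needs TableMeta (and hence a Hive connection)
    // before building the ANALYZE query.
    Job job = proxy.analyzeTable(databaseName, tableName, shouldAnalyzeColumns, getResourceManager());
    JSONObject response = new JSONObject();
    response.put("job", job);
    return Response.status(Response.Status.ACCEPTED).entity(response).build();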
http://git-wip-us.apache.org/repos/asf/ambari/blob/0de07e09/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGeneratorSpecTest.groovy
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGeneratorSpecTest.groovy b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGeneratorSpecTest.groovy
deleted file mode 100644
index 91d1b44..0000000
--- a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/internal/query/generators/AnalyzeTableQueryGeneratorSpecTest.groovy
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.ambari.view.hive20.internal.query.generators;
-
-import com.google.gson.Gson;
-import org.apache.ambari.view.hive20.internal.dto.TableMeta;
-import spock.lang.Specification;
-
-class AnalyzeTableQueryGeneratorSpecTest extends Specification {
-  def "analyze with partition and for columns"() {
-    setup:
-    String tableMetaJson = "{" +
-            "\"database\": \"d1\"," +
-            "\"table\": \"t2\"," +
-            "\"columns\": [{" +
-            "\"name\": \"col_name1\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name1 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name2\"," +
-            "\"type\": \"decimal(10,2)\"," +
-            "\"comment\": \"col_name2 comment\"" +
-            "}]," +
-            "\"partitionInfo\": {" +
-            "\"columns\": [{" +
-            "\"name\": \"col_name4\"," +
-            "\"type\": \"char(1)\"," +
-            "\"comment\": \"col_name4 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name3\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name3 comment\"" +
-            "}]" +
-            "}" +
-            "}";
-
-    TableMeta tableMeta = new Gson().fromJson(tableMetaJson, TableMeta.class);
-    AnalyzeTableQueryGenerator generator = new AnalyzeTableQueryGenerator(tableMeta, true);
-
-    when:
-    Optional<String> databaseDeleteQuery = generator.getQuery()
-
-    then:
-    databaseDeleteQuery.isPresent()
-
-    when:
-    String query = databaseDeleteQuery.get();
-
-    then:
-    query == "ANALYZE TABLE `d1`.`t2` PARTITION (col_name4,col_name3) COMPUTE 
STATISTICS  FOR COLUMNS ;"
-  }
-  def "analyze with partition"() {
-    setup:
-    String tableMetaJson = "{" +
-            "\"database\": \"d1\"," +
-            "\"table\": \"t2\"," +
-            "\"columns\": [{" +
-            "\"name\": \"col_name1\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name1 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name2\"," +
-            "\"type\": \"decimal(10,2)\"," +
-            "\"comment\": \"col_name2 comment\"" +
-            "}]," +
-            "\"partitionInfo\": {" +
-            "\"columns\": [{" +
-            "\"name\": \"col_name4\"," +
-            "\"type\": \"char(1)\"," +
-            "\"comment\": \"col_name4 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name3\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name3 comment\"" +
-            "}]" +
-            "}" +
-            "}";
-
-    TableMeta tableMeta = new Gson().fromJson(tableMetaJson, TableMeta.class);
-    AnalyzeTableQueryGenerator generator = new AnalyzeTableQueryGenerator(tableMeta, false);
-
-    when:
-    Optional<String> databaseDeleteQuery = generator.getQuery()
-
-    then:
-    databaseDeleteQuery.isPresent()
-
-    when:
-    String query = databaseDeleteQuery.get();
-
-    then:
-    query == "ANALYZE TABLE `d1`.`t2` PARTITION (col_name4,col_name3) COMPUTE 
STATISTICS ;"
-  }
-
-  def "analyze without partition"() {
-    setup:
-    String tableMetaJson = "{" +
-            "\"database\": \"d1\"," +
-            "\"table\": \"t2\"," +
-            "\"columns\": [{" +
-            "\"name\": \"col_name1\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name1 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name2\"," +
-            "\"type\": \"decimal(10,2)\"," +
-            "\"comment\": \"col_name2 comment\"" +
-            "}," +
-            "{" +
-            "\"name\": \"col_name4\"," +
-            "\"type\": \"char(1)\"," +
-            "\"comment\": \"col_name4 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name3\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name3 comment\"" +
-            "}" +
-            "]" +
-            "}";
-
-    TableMeta tableMeta = new Gson().fromJson(tableMetaJson, TableMeta.class);
-    AnalyzeTableQueryGenerator generator = new AnalyzeTableQueryGenerator(tableMeta, true);
-
-    when:
-    Optional<String> databaseDeleteQuery = generator.getQuery()
-
-    then:
-    databaseDeleteQuery.isPresent()
-
-    when:
-    String query = databaseDeleteQuery.get();
-
-    then:
-    query == "ANALYZE TABLE `d1`.`t2` COMPUTE STATISTICS  FOR COLUMNS ;"
-  }
-
-  def "analyze for table only"() {
-    setup:
-    String tableMetaJson = "{" +
-            "\"database\": \"d1\"," +
-            "\"table\": \"t2\"," +
-            "\"columns\": [{" +
-            "\"name\": \"col_name1\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name1 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name2\"," +
-            "\"type\": \"decimal(10,2)\"," +
-            "\"comment\": \"col_name2 comment\"" +
-            "}," +
-            "{" +
-            "\"name\": \"col_name4\"," +
-            "\"type\": \"char(1)\"," +
-            "\"comment\": \"col_name4 comment\"" +
-            "}, {" +
-            "\"name\": \"col_name3\"," +
-            "\"type\": \"string\"," +
-            "\"comment\": \"col_name3 comment\"" +
-            "}" +
-            "]" +
-            "}";
-
-    TableMeta tableMeta = new Gson().fromJson(tableMetaJson, TableMeta.class);
-    AnalyzeTableQueryGenerator generator = new AnalyzeTableQueryGenerator(tableMeta, false);
-
-    when:
-    Optional<String> databaseDeleteQuery = generator.getQuery()
-
-    then:
-    databaseDeleteQuery.isPresent()
-
-    when:
-    String query = databaseDeleteQuery.get();
-
-    then:
-    query == "ANALYZE TABLE `d1`.`t2` COMPUTE STATISTICS ;"
-  }
-}
