Repository: ambari
Updated Branches:
  refs/heads/trunk cda2ea25f -> e0c8df5a9


AMBARI-8599. Support altering TTL on Phoenix tables.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e0c8df5a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e0c8df5a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e0c8df5a

Branch: refs/heads/trunk
Commit: e0c8df5a93726933021cdfb62b94bd1894c17d20
Parents: cda2ea2
Author: Siddharth Wagle <swa...@hortonworks.com>
Authored: Mon Dec 8 20:17:58 2014 -0800
Committer: Siddharth Wagle <swa...@hortonworks.com>
Committed: Mon Dec 8 20:17:58 2014 -0800

----------------------------------------------------------------------
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 37 +++++++++++++++++---
 .../metrics/timeline/PhoenixTransactSQL.java    |  5 +++
 2 files changed, 38 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e0c8df5a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index 4f248b7..cb28a8b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.util.RetryCounterFactory;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
+import org.apache.phoenix.exception.SQLExceptionCode;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 import java.io.IOException;
@@ -39,6 +40,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.ALTER_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_AGGREGATE_HOURLY_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_AGGREGATE_MINUTE_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_SQL;
@@ -47,6 +49,10 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.UPSERT_AGGREGATE_RECORD_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_SQL;
@@ -239,13 +245,36 @@ public class PhoenixHBaseAccessor {
         encoding, clusterMinTtl, compression));
       stmt.executeUpdate(String.format(CREATE_METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_SQL,
         encoding, clusterHourTtl, compression));
+
+      //alter TTL options to update tables
+      stmt.executeUpdate(String.format(ALTER_SQL,
+        METRICS_RECORD_TABLE_NAME,
+        precisionTtl));
+      stmt.executeUpdate(String.format(ALTER_SQL,
+        METRICS_AGGREGATE_MINUTE_TABLE_NAME,
+        hostMinTtl));
+      stmt.executeUpdate(String.format(ALTER_SQL,
+        METRICS_AGGREGATE_HOURLY_TABLE_NAME,
+        hostHourTtl));
+      stmt.executeUpdate(String.format(ALTER_SQL,
+        METRICS_CLUSTER_AGGREGATE_TABLE_NAME,
+        clusterMinTtl));
+      stmt.executeUpdate(String.format(ALTER_SQL,
+        METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME,
+        clusterHourTtl));
+
       conn.commit();
     } catch (SQLException sql) {
-      LOG.warn("Error creating Metrics Schema in HBase using Phoenix.", sql);
-      throw new MetricsInitializationException(
-        "Error creating Metrics Schema in HBase using Phoenix.", sql);
+      if (sql.getErrorCode() ==
+        SQLExceptionCode.SET_UNSUPPORTED_PROP_ON_ALTER_TABLE.getErrorCode()) {
+        LOG.warn("Cannot update TTL on tables. " + sql.getMessage());
+      } else {
+        LOG.error("Error creating Metrics Schema in HBase using Phoenix.", sql);
+        throw new MetricsInitializationException(
+          "Error creating Metrics Schema in HBase using Phoenix.", sql);
+      }
     } catch (InterruptedException e) {
-      LOG.warn("Error creating Metrics Schema in HBase using Phoenix.", e);
+      LOG.error("Error creating Metrics Schema in HBase using Phoenix.", e);
       throw new MetricsInitializationException(
         "Error creating Metrics Schema in HBase using Phoenix.", e);
     } finally {

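For context, the new statements follow the standard Phoenix JDBC pattern: format the ALTER_SQL template with a table name and a TTL value, execute it on the same statement used for the CREATE TABLE calls, and downgrade the failure to a warning when the Phoenix version in use rejects setting TTL through ALTER TABLE. Below is a minimal standalone sketch of that pattern; the JDBC URL, class name, table name and TTL value are illustrative assumptions, not values from the patch, which reuses the accessor's existing connection and the METRICS_*_TABLE_NAME constants.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.phoenix.exception.SQLExceptionCode;

public class AlterTtlSketch {
  // Same template string as the ALTER_SQL constant added to PhoenixTransactSQL.
  static final String ALTER_SQL = "ALTER TABLE %s SET TTL=%s";

  public static void main(String[] args) throws SQLException {
    // Illustrative JDBC URL, table name and TTL (one day, in seconds).
    try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost:2181");
         Statement stmt = conn.createStatement()) {
      try {
        // Formats to: ALTER TABLE METRIC_RECORD SET TTL=86400
        stmt.executeUpdate(String.format(ALTER_SQL, "METRIC_RECORD", 86400));
        conn.commit();
      } catch (SQLException sql) {
        // Phoenix versions that do not allow changing TTL via ALTER TABLE raise
        // SET_UNSUPPORTED_PROP_ON_ALTER_TABLE; the patch treats that case as a warning.
        if (sql.getErrorCode() ==
            SQLExceptionCode.SET_UNSUPPORTED_PROP_ON_ALTER_TABLE.getErrorCode()) {
          System.err.println("Cannot update TTL on tables. " + sql.getMessage());
        } else {
          throw sql;
        }
      }
    }
  }
}

The design choice mirrored here is that an unsupported-property error is not fatal: schema creation still succeeds, only the TTL update is skipped.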
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0c8df5a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
index aef71e1..93ba2d8 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
@@ -116,6 +116,11 @@ public class PhoenixTransactSQL {
       "TTL=%s, COMPRESSION='%s'";
 
   /**
+   * ALTER table to set new options
+   */
+  public static final String ALTER_SQL = "ALTER TABLE %s SET TTL=%s";
+
+  /**
    * Insert into metric records table.
    */
   public static final String UPSERT_METRICS_SQL = "UPSERT INTO %s " +

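As a usage note, ALTER_SQL is a plain String.format template, so the DDL Phoenix ends up executing is easy to read off the arguments. A small sketch; the table name and TTL here are illustrative, not values taken from the patch:

// ALTER_SQL as added above: "ALTER TABLE %s SET TTL=%s"
String ddl = String.format("ALTER TABLE %s SET TTL=%s", "METRIC_RECORD", 86400);
// ddl is now "ALTER TABLE METRIC_RECORD SET TTL=86400"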