Repository: phoenix
Updated Branches:
  refs/heads/master b7cd7e505 -> c65ef907f


PHOENIX-2132 Pherf - Fix drop all command and execution from Eclipse/IDE


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c65ef907
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c65ef907
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c65ef907

Branch: refs/heads/master
Commit: c65ef907f4c2e8edbcc563e2fea3decca41c4fe9
Parents: b7cd7e5
Author: Mujtaba <mujt...@apache.org>
Authored: Fri Jul 24 11:18:52 2015 -0700
Committer: Mujtaba <mujt...@apache.org>
Committed: Fri Jul 24 11:18:52 2015 -0700

----------------------------------------------------------------------
 phoenix-pherf/pom.xml                           | 25 ++++++---
 .../apache/phoenix/pherf/util/PhoenixUtil.java  | 55 +++++++++++++++-----
 2 files changed, 60 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c65ef907/phoenix-pherf/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 8343be0..e08670c 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -69,7 +69,6 @@
                <dependency>
                        <groupId>junit</groupId>
                        <artifactId>junit</artifactId>
-                       <version>4.11</version>
                        <scope>test</scope>
                </dependency>
                <dependency>
@@ -91,14 +90,12 @@
                <dependency>
                        <groupId>org.apache.hbase</groupId>
                        <artifactId>hbase-testing-util</artifactId>
-                       <version>${hbase.version}</version>
                        <scope>test</scope>
                        <optional>true</optional>
                </dependency>
                <dependency>
                        <groupId>org.apache.hbase</groupId>
                        <artifactId>hbase-it</artifactId>
-                       <version>${hbase.version}</version>
                        <type>test-jar</type>
                        <scope>test</scope>
                </dependency>
@@ -109,6 +106,12 @@
                        <resource>
                                <directory>src/main/resources</directory>
                        </resource>
+                       <resource>
+                               <directory>config</directory>
+                               <includes>
+                                       <include>**/*.properties</include>
+                               </includes>
+                       </resource>
                </resources>
                <testResources>
                        <testResource>
@@ -129,13 +132,11 @@
                        </plugin>
                        <plugin>
                                <artifactId>maven-dependency-plugin</artifactId>
-                               <version>${maven-dependency-plugin.version}</version>
                        </plugin>
 
                        <plugin>
                                <groupId>org.apache.maven.plugins</groupId>
                                <artifactId>maven-compiler-plugin</artifactId>
-                               <version>3.1</version>
                                <configuration>
                                        <source>1.7</source>
                                        <target>1.7</target>
@@ -177,17 +178,27 @@
                                                </goals>
                                                <configuration>
                                                        <descriptors>
-                                                           <!-- Produces minimal Pherf jar -->
+                                                               <!-- Produces minimal Pherf jar -->
                                                                <descriptor>src/main/assembly/minimal.xml</descriptor>
                                                                <!-- Produces standalone zip that bundles all required dependencies -->
                                                                <descriptor>src/main/assembly/standalone.xml</descriptor>
-                                                               <!-- Produces cluster zip with minimal Pherf jar. Setting HBase classpath in env.sh is required for this configuration -->
+                                                               <!-- Produces cluster zip with minimal Pherf jar. Setting HBase classpath
+                                                                       in env.sh is required for this configuration -->
                                                                <descriptor>src/main/assembly/cluster.xml</descriptor>
                                                        </descriptors>
                                                </configuration>
                                        </execution>
                                </executions>
                        </plugin>
+                       <plugin>
+                               <groupId>org.apache.rat</groupId>
+                               <artifactId>apache-rat-plugin</artifactId>
+                               <configuration>
+                                       <excludes>
+                                               <exclude>RESULTS/**</exclude>
+                                       </excludes>
+                               </configuration>
+                       </plugin>
                </plugins>
        </build>
 </project>
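
As background for the resource change above: with the config directory declared as a Maven resource, Pherf's *.properties files land on the runtime classpath, so a launch from Eclipse or another IDE can load them through the classloader rather than relying on the packaged zip layout. Below is a minimal sketch of that classpath lookup, assuming the usual pherf.properties file name (the class name and error handling here are illustrative only, not part of this commit):

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class ClasspathConfigSketch {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        // Load the Pherf configuration from the classpath; the <resource> entry
        // above is what makes this work for IDE runs.
        try (InputStream in = ClasspathConfigSketch.class
                .getClassLoader().getResourceAsStream("pherf.properties")) {
            if (in == null) {
                // Without the config resource directory, an IDE launch would hit this branch.
                throw new IOException("pherf.properties not found on classpath");
            }
            props.load(in);
        }
        System.out.println("Loaded " + props.size() + " Pherf properties");
    }
}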

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c65ef907/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
index 0156149..5f7d637 100644
--- a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
+++ b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/util/PhoenixUtil.java
@@ -22,19 +22,17 @@ import org.apache.phoenix.pherf.PherfConstants;
 import org.apache.phoenix.pherf.configuration.Column;
 import org.apache.phoenix.pherf.configuration.DataTypeMapping;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
 import java.sql.*;
 import java.util.*;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.phoenix.pherf.configuration.Query;
 import org.apache.phoenix.pherf.configuration.QuerySet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
+
 // TODO This class needs to be cleanup up a bit. I just wanted to get an initial placeholder in.
 public class PhoenixUtil {
        private static final Logger logger = LoggerFactory.getLogger(PhoenixUtil.class);
@@ -95,6 +93,26 @@ public class PhoenixUtil {
         }
         return result;
     }
+
+    /**
+     * Execute statement
+     * @param sql
+     * @param connection
+     * @return
+     * @throws SQLException
+     */
+    public boolean executeStatementThrowException(String sql, Connection connection) throws SQLException {
+       boolean result = false;
+       PreparedStatement preparedStatement = null;
+       try {
+            preparedStatement = connection.prepareStatement(sql);
+            result = preparedStatement.execute();
+            connection.commit();
+        } finally {
+            preparedStatement.close();
+        }
+        return result;
+    }
     
     public boolean executeStatement(String sql, Connection connection) {
        boolean result = false;
@@ -139,14 +157,25 @@ public class PhoenixUtil {
        Connection conn = getConnection();
        try {
                ResultSet resultSet = getTableMetaData(PherfConstants.PHERF_SCHEMA_NAME, null, conn);
-               while (resultSet.next()) {
-                       String tableName = resultSet.getString("TABLE_SCHEMA") == null ? resultSet.getString("TABLE_NAME") :
-                                                          resultSet.getString("TABLE_SCHEMA") + "." + resultSet.getString("TABLE_NAME");
-                       if (tableName.matches(regexMatch)) {
-                               logger.info("\nDropping " + tableName);
-                               executeStatement("DROP TABLE " + tableName + " CASCADE", conn);
-                       }
-               }
+                       while (resultSet.next()) {
+                               String tableName = resultSet.getString(TABLE_SCHEM) == null ? resultSet
+                                               .getString(TABLE_NAME) : resultSet
+                                               .getString(TABLE_SCHEM)
+                                               + "."
+                                               + resultSet.getString(TABLE_NAME);
+                               if (tableName.matches(regexMatch)) {
+                                       logger.info("\nDropping " + tableName);
+                                       try {
+                                               executeStatementThrowException("DROP TABLE "
+                                                               + tableName + " CASCADE", conn);
+                                       } catch (org.apache.phoenix.schema.TableNotFoundException tnf) {
+                                               logger.error("Table might be already be deleted via cascade. Schema: "
+                                                               + tnf.getSchemaName()
+                                                               + " Table: "
+                                                               + tnf.getTableName());
+                                       }
+                               }
+                       }
        } finally {
                conn.close();
        }
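
For reference, the reworked drop-all loop above replaces the silent executeStatement() call with executeStatementThrowException() so that org.apache.phoenix.schema.TableNotFoundException (a SQLException subclass) can be caught when a table was already removed by an earlier DROP ... CASCADE, logged, and skipped instead of failing the run. A minimal standalone sketch of that pattern, assuming a hypothetical table name and a local Phoenix JDBC URL (neither is part of the commit):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.apache.phoenix.schema.TableNotFoundException;

public class DropTableCascadeSketch {
    public static void main(String[] args) throws SQLException {
        // Hypothetical Phoenix JDBC URL; adjust to the target cluster.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            String tableName = "MY_SCHEMA.MY_TABLE"; // illustrative table name only
            try (PreparedStatement stmt =
                    conn.prepareStatement("DROP TABLE " + tableName + " CASCADE")) {
                // Unlike the old executeStatement(), failures propagate to the caller here.
                stmt.execute();
                conn.commit();
            } catch (TableNotFoundException tnf) {
                // A parent dropped earlier with CASCADE may have taken this table with it;
                // log and move on rather than aborting the whole drop-all pass.
                System.err.println("Table already gone: " + tnf.getSchemaName()
                        + "." + tnf.getTableName());
            }
        }
    }
}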
