Repository: phoenix

Updated Branches:
  refs/heads/calcite 0641043a4 -> 76e92a961

PHOENIX-2036 - PhoenixConfigurationUtil should provide a pre-normalized table name to PhoenixRuntime

Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/be5aba5b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/be5aba5b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/be5aba5b

Branch: refs/heads/calcite
Commit: be5aba5bcdbbf28c4faa04414e9e4f4079276614
Parents: 1c10fda
Author: ravimagham <[email protected]>
Authored: Sat Jul 4 08:11:25 2015 -0700
Committer: ravimagham <[email protected]>
Committed: Sat Jul 4 08:11:25 2015 -0700

----------------------------------------------------------------------
 .../org/apache/phoenix/util/PhoenixRuntime.java |  3 +-
 .../java/org/apache/phoenix/util/QueryUtil.java |  4 +-
 .../org/apache/phoenix/util/SchemaUtil.java     | 18 ++++++-
 .../phoenix/pig/PhoenixHBaseLoaderIT.java       |  9 ++--
 .../phoenix/pig/PhoenixHBaseStorerIT.java       |  4 +-
 .../apache/phoenix/pig/PhoenixHBaseStorage.java | 51 ++++++++------------
 6 files changed, 47 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
index 4347acd..92bb1d8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
@@ -354,8 +354,7 @@ public class PhoenixRuntime {
     public static List<ColumnInfo> generateColumnInfo(Connection conn,
             String tableName, List<String> columns)
             throws SQLException {
-
-        PTable table = PhoenixRuntime.getTable(conn, tableName);
+        PTable table = PhoenixRuntime.getTable(conn, SchemaUtil.normalizeFullTableName(tableName));
         List<ColumnInfo> columnInfoList = Lists.newArrayList();
         Set<String> unresolvedColumnNames = new TreeSet<String>();
         if (columns == null) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
index bc2141c..d7f9cea 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
@@ -201,8 +201,6 @@
         if(columnInfos == null || columnInfos.isEmpty()) {
             throw new IllegalArgumentException("At least one column must be provided");
         }
-        // escape the table name to ensure it is case sensitive.
-        final String escapedFullTableName = SchemaUtil.getEscapedFullTableName(fullTableName);
         StringBuilder query = new StringBuilder();
         query.append("SELECT ");
         for (ColumnInfo cinfo : columnInfos) {
@@ -215,7 +213,7 @@
         // Remove the trailing comma
         query.setLength(query.length() - 1);
         query.append(" FROM ");
-        query.append(escapedFullTableName);
+        query.append(fullTableName);
         if(conditions != null && conditions.length() > 0) {
             query.append(" WHERE (").append(conditions).append(")");
         }
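
For orientation, a minimal sketch of what the PhoenixRuntime/QueryUtil change enables: the caller hands over the full table name as-is, and the SELECT-building helper touched above (QueryUtil.constructSelectStatement) appends it as supplied instead of re-escaping it. The schema, table, and column names below are made up, and the exact escaping of column names in the generated SQL is an assumption, not something shown in this diff.

import java.sql.Types;
import java.util.List;

import com.google.common.collect.ImmutableList;
import org.apache.phoenix.util.ColumnInfo;
import org.apache.phoenix.util.QueryUtil;

public class SelectStatementSketch {
    public static void main(String[] args) {
        // Hypothetical columns; ColumnInfo takes a column name and a java.sql.Types code.
        List<ColumnInfo> columns = ImmutableList.of(
                new ColumnInfo("ID", Types.INTEGER),
                new ColumnInfo("NAME", Types.VARCHAR));
        // The table name is used exactly as supplied, so a pre-escaped, case-sensitive
        // name such as MY_SCHEMA."a" is no longer double-escaped by QueryUtil.
        String select = QueryUtil.constructSelectStatement("MY_SCHEMA.\"a\"", columns, null);
        System.out.println(select);
    }
}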

http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
index aff6b51..c674140 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
@@ -21,9 +21,8 @@ import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Strings.isNullOrEmpty;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES;
-import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES;
-
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES;
 
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -174,6 +173,21 @@
         }
         return name.toUpperCase();
     }
+
+    /**
+     * Normalizes the full table name. Uses {@linkplain #normalizeIdentifier(String)} on each part.
+     * @param fullTableName the schema-qualified or unqualified table name
+     * @return the normalized table name
+     */
+    public static String normalizeFullTableName(String fullTableName) {
+        String schemaName = SchemaUtil.getSchemaNameFromFullName(fullTableName);
+        String tableName = SchemaUtil.getTableNameFromFullName(fullTableName);
+        String normalizedTableName = StringUtil.EMPTY_STRING;
+        if(!schemaName.isEmpty()) {
+            normalizedTableName = normalizeIdentifier(schemaName) + QueryConstants.NAME_SEPARATOR;
+        }
+        return normalizedTableName + normalizeIdentifier(tableName);
+    }
 
     public static boolean isCaseSensitive(String name) {
         return name!=null && name.length() > 0 && name.charAt(0)=='"';
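
A minimal sketch of what the new SchemaUtil.normalizeFullTableName is expected to return, assuming the existing normalizeIdentifier behavior shown in the context above (unquoted identifiers are upper-cased, double-quoted identifiers keep their case and lose the quotes). The schema and table names are made up.

import org.apache.phoenix.util.SchemaUtil;

public class NormalizeFullTableNameSketch {
    public static void main(String[] args) {
        // Unquoted parts are upper-cased: expected "MY_SCHEMA.MY_TABLE".
        System.out.println(SchemaUtil.normalizeFullTableName("my_schema.my_table"));
        // A double-quoted part keeps its case: expected "MY_SCHEMA.a", which is the kind of
        // case-sensitive name exercised by the Pig integration tests below.
        System.out.println(SchemaUtil.normalizeFullTableName("my_schema.\"a\""));
    }
}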

http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
----------------------------------------------------------------------
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 594abe6..7fcf6ac 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -74,6 +74,8 @@ public class PhoenixHBaseLoaderIT {
     private static final String TABLE_NAME = "A";
     private static final String INDEX_NAME = "I";
     private static final String TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, TABLE_NAME);
+    private static final String CASE_SENSITIVE_TABLE_NAME = SchemaUtil.getEscapedArgument("a");
+    private static final String CASE_SENSITIVE_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME,CASE_SENSITIVE_TABLE_NAME);
     private static HBaseTestingUtility hbaseTestUtil;
     private static String zkQuorum;
     private static Connection conn;
@@ -234,13 +236,13 @@ public void testDataForTable() throws Exception {
 
         //create the table
-        String ddl = "CREATE TABLE " + TABLE_FULL_NAME
+        String ddl = "CREATE TABLE " + CASE_SENSITIVE_TABLE_FULL_NAME
                 + " (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR, AGE INTEGER) ";
         conn.createStatement().execute(ddl);
 
         //prepare data with 10 rows having age 25 and the other 30.
-        final String dml = "UPSERT INTO " + TABLE_FULL_NAME + " VALUES(?,?,?)";
+        final String dml = "UPSERT INTO " + CASE_SENSITIVE_TABLE_FULL_NAME + " VALUES(?,?,?)";
         PreparedStatement stmt = conn.prepareStatement(dml);
         int rows = 20;
         for(int i = 0 ; i < rows; i++) {
@@ -253,7 +255,7 @@
 
         //load data and filter rows whose age is > 25
         pigServer.registerQuery(String.format(
-                "A = load 'hbase://table/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", TABLE_FULL_NAME,
+                "A = load 'hbase://table/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", CASE_SENSITIVE_TABLE_FULL_NAME,
                 zkQuorum));
         pigServer.registerQuery("B = FILTER A BY AGE > 25;");
@@ -603,6 +605,7 @@
     @After
     public void tearDown() throws Exception {
         dropTable(TABLE_FULL_NAME);
+        dropTable(CASE_SENSITIVE_TABLE_FULL_NAME);
         pigServer.shutdown();
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
----------------------------------------------------------------------
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
index 67abf62..9106cdd 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
@@ -33,6 +33,7 @@ import java.util.Collection;
 
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.util.SchemaUtil;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
@@ -140,12 +141,11 @@ public class PhoenixHBaseStorerIT extends BaseHBaseManagedTimeIT {
      */
     @Test
     public void testStorerForSpecificColumns() throws Exception {
-        final String tableName = "TABLE2";
+        final String tableName = SchemaUtil.getTableName("TABLE2", SchemaUtil.getEscapedArgument("zo2"));
         final Statement stmt = conn.createStatement();
         stmt.execute("CREATE TABLE " + tableName +
                 " (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR, AGE INTEGER)");
-
         final Data data = Storage.resetData(pigServer);
         final Collection<Tuple> list = Lists.newArrayList();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseStorage.java
----------------------------------------------------------------------
diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseStorage.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseStorage.java
index 72d958b..4ada303 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseStorage.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/PhoenixHBaseStorage.java
@@ -18,8 +18,6 @@
 package org.apache.phoenix.pig;
 
 import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
 import java.sql.SQLException;
 import java.util.List;
 import java.util.Properties;
@@ -33,12 +31,14 @@ import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.phoenix.mapreduce.PhoenixOutputFormat;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
+import org.apache.phoenix.pig.util.TableSchemaParserFunction;
 import org.apache.phoenix.pig.writable.PhoenixPigDBWritable;
 import org.apache.phoenix.util.ColumnInfo;
 import org.apache.pig.ResourceSchema;
@@ -88,12 +88,12 @@ public class PhoenixHBaseStorage implements StoreFuncInterface {
     private ResourceSchema schema;
     private long batchSize;
     private final PhoenixOutputFormat outputFormat = new PhoenixOutputFormat();
-
     // Set of options permitted
     private final static Options validOptions = new Options();
     private final static CommandLineParser parser = new GnuParser();
     private final static String SCHEMA = "_schema";
-
+    private final static String PHOENIX_TABLE_NAME_SCHEME = "hbase://";
+
     private final CommandLine configuredOptions;
     private final String server;
@@ -134,33 +134,24 @@ public class PhoenixHBaseStorage implements StoreFuncInterface {
      */
     @Override
     public void setStoreLocation(String location, Job job) throws IOException {
-        URI locationURI;
-        try {
-            locationURI = new URI(location);
-            if (!"hbase".equals(locationURI.getScheme())) {
-                throw new IOException(String.format("Location must use the hbase protocol, hbase://tableName[/columnList]. Supplied location=%s",location));
-            }
-
-            PhoenixConfigurationUtil.loadHBaseConfiguration(job);
-            config = job.getConfiguration();
-            config.set(HConstants.ZOOKEEPER_QUORUM, server);
-            String tableName = locationURI.getAuthority();
-            // strip off the leading path token '/'
-            String columns = null;
-            if(!locationURI.getPath().isEmpty()) {
-                columns = locationURI.getPath().substring(1);
-                PhoenixConfigurationUtil.setUpsertColumnNames(config, columns);
-            }
-            PhoenixConfigurationUtil.setOutputTableName(config,tableName);
-            PhoenixConfigurationUtil.setBatchSize(config,batchSize);
-            String serializedSchema = getUDFProperties().getProperty(contextSignature + SCHEMA);
-            if (serializedSchema != null) {
-                schema = (ResourceSchema) ObjectSerializer.deserialize(serializedSchema);
-            }
-        } catch (URISyntaxException e) {
-            throw new IOException(String.format("Location must use the hbase protocol, hbase://tableName[/columnList]. Supplied location=%s",location),e);
+        String tableSchema = location.substring(PHOENIX_TABLE_NAME_SCHEME.length());
+        final TableSchemaParserFunction parseFunction = new TableSchemaParserFunction();
+        Pair<String,String> pair = parseFunction.apply(tableSchema);
+        PhoenixConfigurationUtil.loadHBaseConfiguration(job);
+        config = job.getConfiguration();
+        config.set(HConstants.ZOOKEEPER_QUORUM, server);
+        String tableName = pair.getFirst();
+        String columns = pair.getSecond();
+        if(columns != null && columns.length() > 0) {
+            PhoenixConfigurationUtil.setUpsertColumnNames(config, columns);
         }
-    }
+        PhoenixConfigurationUtil.setOutputTableName(config,tableName);
+        PhoenixConfigurationUtil.setBatchSize(config,batchSize);
+        String serializedSchema = getUDFProperties().getProperty(contextSignature + SCHEMA);
+        if (serializedSchema != null) {
+            schema = (ResourceSchema) ObjectSerializer.deserialize(serializedSchema);
+        }
+    }
 
     @SuppressWarnings("unchecked")
     @Override
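
Taken together, the Pig storer can now be pointed at a case-sensitive Phoenix table through the hbase://tableName[/columnList] location that setStoreLocation parses above. The following is a rough sketch of that usage, not taken from the commit: the schema, table, columns, ZooKeeper quorum, input file, and batch size are made up, and it assumes a reachable HBase/Phoenix cluster on which the quoted target table already exists.

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;

public class CaseSensitiveStoreSketch {
    public static void main(String[] args) throws Exception {
        String zkQuorum = "localhost:2181"; // made-up quorum
        PigServer pigServer = new PigServer(ExecType.LOCAL);
        pigServer.registerQuery("A = load 'people.csv' using PigStorage(',') "
                + "as (id:int, name:chararray, age:int);");
        // setStoreLocation() strips the hbase:// scheme, TableSchemaParserFunction splits the
        // table name from the optional column list, and the table name later reaches
        // PhoenixRuntime via SchemaUtil.normalizeFullTableName, so the quoted "people"
        // keeps its lower case.
        pigServer.registerQuery("STORE A INTO 'hbase://MY_SCHEMA.\"people\"/ID,NAME,AGE' using "
                + "org.apache.phoenix.pig.PhoenixHBaseStorage('" + zkQuorum + "', '-batchSize 500');");
        pigServer.shutdown();
    }
}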
