swaroopak commented on a change in pull request #848:
URL: https://github.com/apache/phoenix/pull/848#discussion_r462648136
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -1,35 +1,35 @@
package org.apache.phoenix.schema;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
+import com.google.inject.internal.util.$ImmutableCollection;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.mapreduce.util.ConnectionUtil;
import org.apache.phoenix.query.ConnectionQueryServices;
+import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.SchemaUtil;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
public class SchemaExtractionProcessor {
- public static final String
- FEATURE_NOT_SUPPORTED_ON_EXTRACTION_TOOL =
- "Multiple CF feature not supported on extraction tool";
+ public static final List<String>
SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES = ImmutableList.of(
Review comment:
This constant should already be available in other parts of the codebase. You can
just import it here instead of redefining it.
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -1,35 +1,35 @@
package org.apache.phoenix.schema;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
+import com.google.inject.internal.util.$ImmutableCollection;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.mapreduce.util.ConnectionUtil;
import org.apache.phoenix.query.ConnectionQueryServices;
+import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.SchemaUtil;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
Review comment:
Avoid wildcard imports (`import java.util.*`); list each imported class explicitly.
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
Review comment:
nit: cfMap
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
Review comment:
space before `{`
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
+ String columnFamilyName =
Bytes.toString(columnDescriptor.getName());
+ ImmutableBytesWritable value =
columnDescriptor.getValues().get(key);
+ // check if it is universal properties
+ if
(SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES.contains(Bytes.toString(key.get()))){
+ definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ continue;
+ }
+ CFMap.put(columnFamilyName, Bytes.toString(value.get()));
+ set.add(value);
+ }
+ if (set.size() > 1){
Review comment:
space before {
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
+ String columnFamilyName =
Bytes.toString(columnDescriptor.getName());
+ ImmutableBytesWritable value =
columnDescriptor.getValues().get(key);
+ // check if it is universal properties
+ if
(SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES.contains(Bytes.toString(key.get()))){
+ definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ continue;
+ }
+ CFMap.put(columnFamilyName, Bytes.toString(value.get()));
+ set.add(value);
+ }
+ if (set.size() > 1){
Review comment:
space before `{` and at other similar spots
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
Review comment:
let's call it globalValue
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
+ String columnFamilyName =
Bytes.toString(columnDescriptor.getName());
+ ImmutableBytesWritable value =
columnDescriptor.getValues().get(key);
+ // check if it is universal properties
+ if
(SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES.contains(Bytes.toString(key.get()))){
+ definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ continue;
+ }
+ CFMap.put(columnFamilyName, Bytes.toString(value.get()));
+ set.add(value);
+ }
+ if (set.size() > 1){
Review comment:
This would fail if two CFs have the same non-default value for `VERSIONS` (or
any other property).
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
+ String columnFamilyName =
Bytes.toString(columnDescriptor.getName());
+ ImmutableBytesWritable value =
columnDescriptor.getValues().get(key);
+ // check if it is universal properties
+ if
(SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES.contains(Bytes.toString(key.get()))){
+ definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ continue;
+ }
+ CFMap.put(columnFamilyName, Bytes.toString(value.get()));
+ set.add(value);
+ }
+ if (set.size() > 1){
Review comment:
This would fail if two CFs have the same non-default value for `VERSIONS` (or
any other property).
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
Review comment:
nit: cfPropertyValueSet
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
Review comment:
nit: cfMap or cfToPropertyValueMap
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
+ String columnFamilyName =
Bytes.toString(columnDescriptor.getName());
+ ImmutableBytesWritable value =
columnDescriptor.getValues().get(key);
+ // check if it is universal properties
+ if
(SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES.contains(Bytes.toString(key.get()))){
+ definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ continue;
+ }
+ CFMap.put(columnFamilyName, Bytes.toString(value.get()));
+ set.add(value);
+ }
+ if (set.size() > 1){
Review comment:
Let's say there are 3 CFs and only 2 of them have VERSIONS=2. Would this
work? Let's add a test if we don't have one to cover this scenario.
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
+ String columnFamilyName =
Bytes.toString(columnDescriptor.getName());
+ ImmutableBytesWritable value =
columnDescriptor.getValues().get(key);
+ // check if it is universal properties
+ if
(SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES.contains(Bytes.toString(key.get()))){
+ definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ continue;
+ }
+ CFMap.put(columnFamilyName, Bytes.toString(value.get()));
+ set.add(value);
Review comment:
I think we can maintain a map of [propertyValue -> occurrences]
and execute line no. 273 only when exactly one entry from the map has
occurrences equal to the number of column families.
Also, it would be good to refactor this logic into a separate function.
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -250,12 +247,31 @@ private void setHTableProperties(HTableDescriptor htd) {
}
}
- private void setHColumnFamilyProperties(HColumnDescriptor
columnDescriptor) {
- Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptor.getValues();
- for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()) {
+ private void setHColumnFamilyProperties(HColumnDescriptor[]
columnDescriptors) {
+ Map<ImmutableBytesWritable, ImmutableBytesWritable> propsMap =
columnDescriptors[0].getValues();
+ for(Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry :
propsMap.entrySet()){
ImmutableBytesWritable key = entry.getKey();
- ImmutableBytesWritable value = entry.getValue();
- definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ ImmutableBytesWritable defaultValue = entry.getValue();
+ Map<String, String> CFMap = new HashMap<String, String>();
+ Set<ImmutableBytesWritable> set = new
HashSet<ImmutableBytesWritable>();
+ for(HColumnDescriptor columnDescriptor: columnDescriptors){
+ String columnFamilyName =
Bytes.toString(columnDescriptor.getName());
+ ImmutableBytesWritable value =
columnDescriptor.getValues().get(key);
+ // check if it is universal properties
+ if
(SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES.contains(Bytes.toString(key.get()))){
+ definedProps.put(Bytes.toString(key.get()),
Bytes.toString(value.get()));
+ continue;
+ }
+ CFMap.put(columnFamilyName, Bytes.toString(value.get()));
+ set.add(value);
Review comment:
I think we can maintain a map of [propertyValue -> occurrences] and
execute line no. 273 only when exactly one entry from the map has occurrences
equal to the number of column families.
Also, it would be good to refactor this logic into a separate function.
##########
File path:
phoenix-tools/src/main/java/org/apache/phoenix/schema/SchemaExtractionProcessor.java
##########
@@ -374,20 +397,49 @@ private String getColumnInfoStringForView(PTable table,
PTable baseTable) {
private String extractColumn(PColumn column) {
String colName = column.getName().getString();
+ if (column.getFamilyName() != null){
+ String colFamilyName = column.getFamilyName().getString();
+ // check if it is default column family name
+ colName =
colFamilyName.equals(QueryConstants.DEFAULT_COLUMN_FAMILY)? colName :
String.format("\"%s\".\"%s\"", colFamilyName, colName);
+ }
+ boolean isArrayType = column.getDataType().isArrayType();
String type = column.getDataType().getSqlTypeName();
+ Integer maxLength = column.getMaxLength();
+ Integer arrSize = column.getArraySize();
+ Integer scale = column.getScale();
StringBuilder buf = new StringBuilder(colName);
buf.append(' ');
- buf.append(type);
- Integer maxLength = column.getMaxLength();
- if (maxLength != null) {
- buf.append('(');
- buf.append(maxLength);
- Integer scale = column.getScale();
- if (scale != null) {
- buf.append(',');
- buf.append(scale); // has both max length and scale. For ex-
decimal(10,2)
+
+ if (isArrayType) {
+ String arrayPrefix = type.split("\\s+")[0];
+ buf.append(arrayPrefix);
+ if (maxLength != null) {
Review comment:
nit: this if condition is duplicated in the else part. Please refactor
it by extracting the common parts and executing the Array-related logic only when
the if clause passes.
##########
File path:
phoenix-tools/src/it/java/org/apache/phoenix/schema/SchemaExtractionToolIT.java
##########
@@ -210,4 +206,188 @@ public void testCreateViewIndexStatement() throws
Exception {
Assert.assertEquals(createIndexStatement.toUpperCase(),
set.getOutput().toUpperCase());
}
}
+
+ @Test
+ public void testSaltedTableStatement() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_integer integer not null CONSTRAINT pk PRIMARY KEY
(a_integer)) SALT_BUCKETS=16";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String [] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+ String actualProperties =
set.getOutput().substring(set.getOutput().lastIndexOf(")")+1);
+ Assert.assertEquals(true,
actualProperties.contains("SALT_BUCKETS=16"));
+ }
+ }
+
+ @Test
+ public void testCreateTableWithPKConstraint() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(15) NOT NULL, " +
+ "c_bigint BIGINT NOT NULL CONSTRAINT PK PRIMARY KEY
(a_char, b_char, c_bigint)) IMMUTABLE_ROWS=TRUE";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String [] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+ @Test
+ public void testCreateTableWithArrayColumn() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "c_var_array VARCHAR ARRAY, " +
+ "d_char_array CHAR(15) ARRAY[3] CONSTRAINT PK PRIMARY KEY
(a_char, b_char)) " +
+ "TTL=2592000,
IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, DISABLE_TABLE_SOR=true,
REPLICATION_SCOPE=1";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+ @Test
+ public void testCreateTableWithNonDefaultColumnFamily() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "\"av\".\"_\" CHAR(1), " +
+ "\"bv\".\"_\" CHAR(1), " +
+ "\"cv\".\"_\" CHAR(1), " +
+ "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char,
b_char)) " +
+ "TTL=1209600, IMMUTABLE_ROWS=true,
IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, SALT_BUCKETS=16,
DISABLE_TABLE_SOR=true, MULTI_TENANT=true";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+ @Test
+ public void testCreateTableWithUniversalCFProperties() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+ String properties = "KEEP_DELETED_CELLS=TRUE, TTL=1209600,
IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, REPLICATION_SCOPE=1";
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "\"av\".\"_\" CHAR(1), " +
+ "\"bv\".\"_\" CHAR(1), " +
+ "\"cv\".\"_\" CHAR(1), " +
+ "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char,
b_char)) " + properties;
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+
+ @Test
+ public void testCreateTableWithMultipleCFs() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+ String properties = "\"av\".VERSIONS=2, \"bv\".VERSIONS=3, " +
+ "\"cv\".VERSIONS=4, DATA_BLOCK_ENCODING=DIFF, " +
+ "IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN,
SALT_BUCKETS=16, DISABLE_TABLE_SOR=true, MULTI_TENANT=true";
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "\"av\".\"_\" CHAR(1), " +
+ "\"bv\".\"_\" CHAR(1), " +
+ "\"cv\".\"_\" CHAR(1), " +
+ "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char,
b_char)) " + properties;
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(true, compareProperties(properties,
set.getOutput().substring(set.getOutput().lastIndexOf(")")+1)));
+ }
+ }
+
+ @Test
+ public void testCreateIndexStatementWithColumnFamily() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ String indexName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ conn.createStatement().execute("CREATE TABLE "+pTableFullName +
"(k VARCHAR NOT NULL PRIMARY KEY, \"av\".\"_\" CHAR(1), v2 VARCHAR)");
+ String createIndexStatement = "CREATE INDEX "+ indexName + " ON
"+pTableFullName+ "(\"av\".\"_\")";
+ conn.createStatement().execute(createIndexStatement);
+ conn.commit();
+
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ String [] args2 = {"-tb", indexName, "-s", schemaName};
+ set.run(args2);
+ Assert.assertEquals(createIndexStatement.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+ private boolean compareProperties(String prop1, String prop2){
Review comment:
Thanks for adding this.
##########
File path:
phoenix-tools/src/it/java/org/apache/phoenix/schema/SchemaExtractionToolIT.java
##########
@@ -33,22 +30,21 @@ public void testCreateTableStatement() throws Exception {
String tableName = generateUniqueName();
String schemaName = generateUniqueName();
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+ String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName,
tableName);
String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
+ String createTable = "CREATE TABLE "+ pTableFullName + "(K VARCHAR NOT
NULL PRIMARY KEY, "
+ + "V1 VARCHAR, V2 VARCHAR) TTL=2592000, IMMUTABLE_ROWS=TRUE,
DISABLE_WAL=TRUE";
try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
-
- String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
- conn.createStatement().execute("CREATE TABLE "+ pTableFullName +
"(k VARCHAR NOT NULL PRIMARY KEY, "
- + "v1 VARCHAR, v2 VARCHAR)"
- + properties);
+ conn.createStatement().execute(createTable);
conn.commit();
String [] args = {"-tb", tableName, "-s", schemaName};
SchemaExtractionTool set = new SchemaExtractionTool();
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
set.run(args);
- String actualProperties =
set.getOutput().substring(set.getOutput().lastIndexOf(")")+1).replace(" ","");
- Assert.assertEquals(3, actualProperties.split(",").length);
+ //String actualProperties =
set.getOutput().substring(set.getOutput().lastIndexOf(")")+1).replace(" ","");
Review comment:
nit: remove the commented code
##########
File path:
phoenix-tools/src/it/java/org/apache/phoenix/schema/SchemaExtractionToolIT.java
##########
@@ -210,4 +206,188 @@ public void testCreateViewIndexStatement() throws
Exception {
Assert.assertEquals(createIndexStatement.toUpperCase(),
set.getOutput().toUpperCase());
}
}
+
+ @Test
+ public void testSaltedTableStatement() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_integer integer not null CONSTRAINT pk PRIMARY KEY
(a_integer)) SALT_BUCKETS=16";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String [] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+ String actualProperties =
set.getOutput().substring(set.getOutput().lastIndexOf(")")+1);
+ Assert.assertEquals(true,
actualProperties.contains("SALT_BUCKETS=16"));
+ }
+ }
+
+ @Test
+ public void testCreateTableWithPKConstraint() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(15) NOT NULL, " +
+ "c_bigint BIGINT NOT NULL CONSTRAINT PK PRIMARY KEY
(a_char, b_char, c_bigint)) IMMUTABLE_ROWS=TRUE";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String [] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+ @Test
+ public void testCreateTableWithArrayColumn() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "c_var_array VARCHAR ARRAY, " +
+ "d_char_array CHAR(15) ARRAY[3] CONSTRAINT PK PRIMARY KEY
(a_char, b_char)) " +
+ "TTL=2592000,
IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, DISABLE_TABLE_SOR=true,
REPLICATION_SCOPE=1";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+ @Test
+ public void testCreateTableWithNonDefaultColumnFamily() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "\"av\".\"_\" CHAR(1), " +
+ "\"bv\".\"_\" CHAR(1), " +
+ "\"cv\".\"_\" CHAR(1), " +
+ "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char,
b_char)) " +
+ "TTL=1209600, IMMUTABLE_ROWS=true,
IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, SALT_BUCKETS=16,
DISABLE_TABLE_SOR=true, MULTI_TENANT=true";
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+ @Test
+ public void testCreateTableWithUniversalCFProperties() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+ String properties = "KEEP_DELETED_CELLS=TRUE, TTL=1209600,
IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN, REPLICATION_SCOPE=1";
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "\"av\".\"_\" CHAR(1), " +
+ "\"bv\".\"_\" CHAR(1), " +
+ "\"cv\".\"_\" CHAR(1), " +
+ "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char,
b_char)) " + properties;
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(query.toUpperCase(),
set.getOutput().toUpperCase());
+ }
+ }
+
+
+ @Test
+ public void testCreateTableWithMultipleCFs() throws Exception {
+ String tableName = generateUniqueName();
+ String schemaName = generateUniqueName();
+ Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+ String properties = "\"av\".VERSIONS=2, \"bv\".VERSIONS=3, " +
+ "\"cv\".VERSIONS=4, DATA_BLOCK_ENCODING=DIFF, " +
+ "IMMUTABLE_STORAGE_SCHEME=ONE_CELL_PER_COLUMN,
SALT_BUCKETS=16, DISABLE_TABLE_SOR=true, MULTI_TENANT=true";
+
+ try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+ String pTableFullName =
SchemaUtil.getQualifiedTableName(schemaName, tableName);
+ String query = "create table " + pTableFullName +
+ "(a_char CHAR(15) NOT NULL, " +
+ "b_char CHAR(10) NOT NULL, " +
+ "\"av\".\"_\" CHAR(1), " +
+ "\"bv\".\"_\" CHAR(1), " +
+ "\"cv\".\"_\" CHAR(1), " +
+ "\"dv\".\"_\" CHAR(1) CONSTRAINT PK PRIMARY KEY (a_char,
b_char)) " + properties;
+ conn.createStatement().execute(query);
+ conn.commit();
+ String[] args = {"-tb", tableName, "-s", schemaName};
+ SchemaExtractionTool set = new SchemaExtractionTool();
+
set.setConf(conn.unwrap(PhoenixConnection.class).getQueryServices().getConfiguration());
+ set.run(args);
+
+ Assert.assertEquals(true, compareProperties(properties,
set.getOutput().substring(set.getOutput().lastIndexOf(")")+1)));
Review comment:
can use assertTrue directly.
##########
File path:
phoenix-tools/src/it/java/org/apache/phoenix/schema/SchemaExtractionToolIT.java
##########
@@ -33,22 +30,21 @@ public void testCreateTableStatement() throws Exception {
String tableName = generateUniqueName();
String schemaName = generateUniqueName();
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+ String pTableFullName = SchemaUtil.getQualifiedTableName(schemaName,
tableName);
String properties = "TTL=2592000,IMMUTABLE_ROWS=true,DISABLE_WAL=true";
Review comment:
nit: could you please remove this variable? It is not used anymore.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]