gokceni commented on a change in pull request #1397:
URL: https://github.com/apache/phoenix/pull/1397#discussion_r815191511



##########
File path: 
phoenix-core/src/main/java/org/apache/phoenix/mapreduce/transform/PhoenixTransformWithViewsInputFormat.java
##########
@@ -0,0 +1,116 @@
+package org.apache.phoenix.mapreduce.transform;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.lib.db.DBWritable;
+import org.apache.phoenix.compile.MutationPlan;
+import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.compile.ServerBuildTransformingTableCompiler;
+import org.apache.phoenix.coprocessor.TableInfo;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.mapreduce.PhoenixInputFormat;
+import org.apache.phoenix.mapreduce.PhoenixServerBuildIndexInputFormat;
+import org.apache.phoenix.mapreduce.util.ConnectionUtil;
+import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
+import org.apache.phoenix.mapreduce.util.ViewInfoWritable;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.transform.Transform;
+import org.apache.phoenix.thirdparty.com.google.common.base.Strings;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.SchemaUtil;
+import org.apache.phoenix.util.StringUtil;
+import org.apache.phoenix.util.ViewUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CHILD_LINK_NAME_BYTES;
+import static 
org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.getIndexToolIndexTableName;
+
+public class PhoenixTransformWithViewsInputFormat<T extends DBWritable> 
extends PhoenixServerBuildIndexInputFormat {
+    private static final Logger LOGGER =
+            
LoggerFactory.getLogger(PhoenixTransformWithViewsInputFormat.class);
+    @Override
+    public List<InputSplit> getSplits(JobContext context) throws IOException, 
InterruptedException {
+        final Configuration configuration = context.getConfiguration();
+        try (PhoenixConnection connection = (PhoenixConnection)
+                ConnectionUtil.getInputConnection(configuration)) {
+            try (Table hTable = 
connection.unwrap(PhoenixConnection.class).getQueryServices().getTable(
+                    
SchemaUtil.getPhysicalTableName(SYSTEM_CHILD_LINK_NAME_BYTES, 
configuration).toBytes())) {
+                String oldDataTableFullName = 
PhoenixConfigurationUtil.getIndexToolDataTableName(configuration);
+                String newDataTableFullName = 
getIndexToolIndexTableName(configuration);
+                PTable newDataTable = 
PhoenixRuntime.getTableNoCache(connection, newDataTableFullName);
+                String schemaName = 
SchemaUtil.getSchemaNameFromFullName(oldDataTableFullName);
+                String tableName = 
SchemaUtil.getTableNameFromFullName(oldDataTableFullName);
+                byte[] schemaNameBytes = Strings.isNullOrEmpty(schemaName) ? 
null : schemaName.getBytes();
+                Pair<List<PTable>, List<TableInfo>> allDescendantViews = 
ViewUtil.findAllDescendantViews(hTable, configuration, null, schemaNameBytes,
+                        tableName.getBytes(), 
EnvironmentEdgeManager.currentTimeMillis(), false);
+                List<PTable> legitimateDecendants = 
allDescendantViews.getFirst();
+
+                List<InputSplit> inputSplits = new ArrayList<>();
+
+                HashMap<String, PColumn> columnMap = new HashMap<>();
+                for (PColumn column : newDataTable.getColumns()) {
+                    columnMap.put(column.getName().getString(), column);
+                }
+
+                for (PTable decendant : legitimateDecendants) {
+                    if (decendant.getViewType() == PTable.ViewType.READ_ONLY) {
+                        continue;
+                    }
+                    PTable newView = Transform.getTransformedView(decendant, 
newDataTable, columnMap, true);
+                    QueryPlan queryPlan = getQueryPlan(newView, decendant, 
connection);
+                    inputSplits.addAll(generateSplits(queryPlan, 
configuration));

Review comment:
       Let me add a parameter to the transform tool, since it is 10 now (the default).




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to