aasha commented on a change in pull request #2043:
URL: https://github.com/apache/hive/pull/2043#discussion_r633565588



##########
File path: 
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosUsingSnapshots.java
##########
@@ -1258,6 +1259,134 @@ private void validateDiffSnapshotsCreated(String 
location) throws Exception {
         dfs.getFileStatus(new Path(locationPath, ".snapshot/" + 
secondSnapshot(primaryDbName.toLowerCase()))));
   }
 
+  @Test
+  public void testSnapshotsWithFiltersCustomDbLevelPaths() throws Throwable {
+    // Directory Structure:
+    //    /prefix/project/   <- Specified as custom Location.(Snapshot Root)
+    //                        /randomStuff <- Not to be copied as part of 
external data copy
+    //                        /warehouse1 <- To be copied, Contains table1 & 
table2
+    //                       /warehouse2 <- To be copied, Contains table3 & 
table4
+
+    // Create /prefix/project
+    Path project = new Path("/" + testName.getMethodName() + "/project");
+    DistributedFileSystem fs = primary.miniDFSCluster.getFileSystem();
+    fs.mkdirs(project);
+
+    // Create /prefix/project/warehouse1
+    Path warehouse1 = new Path(project, "warehouse1");
+    fs.mkdirs(warehouse1);
+
+    // Create /prefix/project/warehouse2
+    Path warehouse2 = new Path(project, "warehouse2");
+    fs.mkdirs(warehouse2);
+
+    // Table1 Path: /prefix/project/warehouse1/table1
+    Path table1 = new Path(warehouse1, "table1");
+    fs.mkdirs(table1);
+
+    // Table2 Path: /prefix/project/warehouse1/table2
+    Path table2 = new Path(warehouse1, "table2");
+    fs.mkdirs(table2);
+
+    // Table3 Path: /prefix/project/warehouse2/table3
+    Path table3 = new Path(warehouse2, "table3");
+    fs.mkdirs(table3);
+
+    // Table4 Path: /prefix/project/warehouse2/table4
+    Path table4 = new Path(warehouse2, "table4");
+    fs.mkdirs(table4);
+
+    // Random Dir inside the /prefix/project
+    Path random = new Path(project, "randomStuff");
+    fs.mkdirs(random);
+
+    fs.create(new Path(random, "file1")).close();
+    fs.create(new Path(random, "file2")).close();
+    fs.create(new Path(random, "file3")).close();
+
+    // Create a filter file for DistCp
+    Path filterFile = new Path("/tmp/filter");
+    try(FSDataOutputStream stream = fs.create(filterFile)) {
+      stream.writeBytes(".*randomStuff.*");
+    }
+    assertTrue(fs.exists(filterFile.makeQualified(fs.getUri(), 
fs.getWorkingDirectory())));
+    FileWriter myWriter = new FileWriter("/tmp/filter");
+    myWriter.write(".*randomStuff.*");
+    myWriter.close();
+
+    // Specify the project directory as the snapshot root using the single 
copy task path config.
+    List<String> withClause = 
ReplicationTestUtils.includeExternalTableClause(true);
+    withClause.add("'"
+        + REPL_EXTERNAL_WAREHOUSE_SINGLE_COPY_TASK_PATHS.varname + "'='" + 
project
+        .makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString() + 
"'");
+
+    // Add Filter file
+    withClause.add("'distcp.options.filters'='" + "/tmp/filter" + "'");

Review comment:
       Clean up the filter file (`/tmp/filter`) after the test completes, e.g. in a finally block or teardown method.

##########
File path: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
##########
@@ -675,6 +675,16 @@ private static void populateLlapDaemonVarsSet(Set<String> 
llapDaemonVarsSetLocal
         + " table or partition level. If hive.exec.parallel \n"
         + "is set to true then max worker threads created for copy can be 
hive.exec.parallel.thread.number(determines \n"
         + "number of copy tasks in parallel) * hive.repl.parallel.copy.tasks 
"),
+    
REPL_SNAPSHOT_DIFF_FOR_EXTERNAL_TABLE_COPY("hive.repl.externaltable.snapshotdiff.copy",
+        false,"Use snapshot diff for copying data from source to "
+        + "destination cluster for external table in distcp. If true it uses 
snapshot based distcp for all the paths "
+        + "configured as part of hive.repl.external.warehouse.single.copy.task 
along with the external warehouse "
+        + "default location."),
+    
REPL_SNAPSHOT_OVERWRITE_TARGET_FOR_EXTERNAL_TABLE_COPY("hive.repl.externaltable.snapshot.overwrite.target",

Review comment:
       Where are the custom location paths (hive.repl.external.warehouse.single.copy.task.paths) not being taken into account here?




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org
For additional commands, e-mail: gitbox-h...@hive.apache.org

Reply via email to