umustafi commented on a change in pull request #3460:
URL: https://github.com/apache/gobblin/pull/3460#discussion_r801028000



##########
File path: 
gobblin-data-management/src/test/java/org/apache/gobblin/data/management/copy/hive/HiveDatasetTest.java
##########
@@ -156,15 +165,67 @@ public void testResolveConfig() throws IOException {
     HiveDatasetFinder.DbAndTable logicalDbAndTable = new 
HiveDatasetFinder.DbAndTable("logicalDb", "logicalTable");
     Config resolvedConfig = HiveDataset.resolveConfig(config, realDbAndTable, 
logicalDbAndTable);
 
-    
Assert.assertEquals(resolvedConfig.getString(DUMMY_CONFIG_KEY_WITH_DB_TOKEN),
-        "resPrefix_realDb_resPostfix", "Real DB not resolved correctly");
-    
Assert.assertEquals(resolvedConfig.getString(DUMMY_CONFIG_KEY_WITH_TABLE_TOKEN),
-        "resPrefix_realTable_resPostfix", "Real Table not resolved correctly");
+    
Assert.assertEquals(resolvedConfig.getString(DUMMY_CONFIG_KEY_WITH_DB_TOKEN), 
"resPrefix_realDb_resPostfix",
+        "Real DB not resolved correctly");
+    
Assert.assertEquals(resolvedConfig.getString(DUMMY_CONFIG_KEY_WITH_TABLE_TOKEN),
 "resPrefix_realTable_resPostfix",
+        "Real Table not resolved correctly");
 
     
Assert.assertEquals(resolvedConfig.getString(HiveDatasetVersionCleaner.REPLACEMENT_HIVE_DB_NAME_KEY),
         "resPrefix_logicalDb_resPostfix", "Logical DB not resolved correctly");
     
Assert.assertEquals(resolvedConfig.getString(HiveDatasetVersionCleaner.REPLACEMENT_HIVE_TABLE_NAME_KEY),
         "resPrefix_logicalTable_resPostfix", "Logical Table not resolved 
correctly");
-    
Assert.assertEquals(resolvedConfig.getString(DUMMY_CONFIG_KEY_WITH_STRIP_SUFFIX),"testRoot");
+    
Assert.assertEquals(resolvedConfig.getString(DUMMY_CONFIG_KEY_WITH_STRIP_SUFFIX),
 "testRoot");
+  }
+
+  @Test(expectedExceptions = RuntimeException.class)
+  public void testThrowsErrorIfCopyEntityHelperFails() throws Exception {
+    Properties copyProperties = new Properties();
+    Properties hiveProperties = new Properties();
+    copyProperties.put(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, "/target");
+    // Trigger an IOException by passing a class that does not exist to the HiveCopyEntityHelper constructor
+    hiveProperties.put(HiveCopyEntityHelper.COPY_PARTITION_FILTER_GENERATOR, 
"missingClass");
+    Table table = new Table(Table.getEmptyTable("testDB", "testTable"));
+    HiveMetastoreClientPool pool = HiveMetastoreClientPool.get(new 
Properties(), Optional.absent());
+
+    HiveDataset passingDataset = new HiveDataset(new LocalFileSystem(), pool, 
table, hiveProperties);
+    // Even though an IOException is thrown, HiveDataset should silence it because the configuration flag is not set
+    try {
+      CopyConfiguration copyConfigWithoutAbortKey = 
CopyConfiguration.builder(new LocalFileSystem(), copyProperties).build();
+      passingDataset.getFileSetIterator(FileSystem.getLocal(new 
Configuration()), copyConfigWithoutAbortKey);
+    } catch (Exception e) {
+      Assert.fail("IOException should log and fail silently since it is not 
configured");
+    }
+
+    // Exception should propagate to a RuntimeException since flag is enabled
+    copyProperties.put(CopyConfiguration.ABORT_ON_SINGLE_DATASET_FAILURE, 
"true");
+    HiveDataset failingDataset = new HiveDataset(new LocalFileSystem(), pool, 
table, hiveProperties);
+    CopyConfiguration copyConfiguration = CopyConfiguration.builder(new 
LocalFileSystem(), copyProperties).build();
+    failingDataset.getFileSetIterator(FileSystem.getLocal(new 
Configuration()), copyConfiguration);
+  }
+
+  @Test(expectedExceptions = RuntimeException.class)
+  public void testThrowsErrorIfTableNotCopyable() throws Exception {
+    Properties copyProperties = new Properties();
+    Properties hiveProperties = new Properties();
+    copyProperties.put(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, "/target");
+    Table table = new Table(Table.getEmptyTable("testDB", "testTable"));
+    // Virtual view tables are not copyable
+    table.setTableType(TableType.VIRTUAL_VIEW);
+    HiveMetastoreClientPool pool = HiveMetastoreClientPool.get(new 
Properties(), Optional.absent());
+
+    HiveDataset passingDataset = new HiveDataset(new LocalFileSystem(), pool, 
table, hiveProperties);
+    // Since the flag is not enabled, the dataset should log an error and continue

Review comment:
       very clearly defined tests, nice job!




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to