maheshk114 commented on code in PR #3293:
URL: https://github.com/apache/hive/pull/3293#discussion_r885142159


##########
ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/DumpMetaData.java:
##########
@@ -35,33 +40,64 @@
 import java.util.Arrays;
 import java.util.List;
 
+@JsonIgnoreProperties(ignoreUnknown = true)
 public class DumpMetaData {
   // wrapper class for reading and writing metadata about a dump
   // responsible for _dumpmetadata files
   public static final String DUMP_METADATA = "_dumpmetadata";
+
+  // New version of dump metadata file to store top level dumpmetadata content 
in JSON format
+  public static final String DUMP_METADATA_V2 = "_dumpmetadata_v2";
   private static final Logger LOG = 
LoggerFactory.getLogger(DumpMetaData.class);
+  private static ObjectMapper JSON_OBJECT_MAPPER = new ObjectMapper(); // 
Thread-safe.
 
+  @JsonProperty
   private DumpType dumpType;
+  @JsonProperty
   private Long eventFrom = null;
+  @JsonProperty
   private Long eventTo = null;
+  @JsonProperty
   private Path cmRoot;
+  @JsonProperty
   private String payload = null;
-  private ReplScope replScope = null;
-
-  private boolean initialized = false;
-  private final Path dumpFile;
-  private final HiveConf hiveConf;
+  @JsonProperty
   private Long dumpExecutionId;
+  @JsonProperty
   private boolean replScopeModified = false;
+  @JsonProperty
+  private String replScopeStr = null;
+  //Ignore rest of the properties
+  @JsonIgnore
+  private ReplScope replScope = null;
+  @JsonIgnore
+  private Path dumpFile;
+  @JsonIgnore
+  private final HiveConf hiveConf;
+  @JsonIgnore
+  private boolean isTopLevel;
+  @JsonIgnore
+  private Path dumpRoot;
+  @JsonIgnore
+  private boolean initialized = false;
+
+  public DumpMetaData() {
+    //to be instantiated by JSON ObjectMapper.
+    hiveConf = null;
+  }
 
   public DumpMetaData(Path dumpRoot, HiveConf hiveConf) {
-    this.hiveConf = hiveConf;
-    dumpFile = new Path(dumpRoot, DUMP_METADATA);
+    this(dumpRoot, hiveConf, false);

Review Comment:
   Why is this old-version dump format still required?



##########
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java:
##########
@@ -2116,6 +2119,70 @@ public void testIncrementalLoadWithOneFailedDump() 
throws IOException {
     verifyRun("SELECT a from " + replDbName + ".ptned WHERE b=2", ptnData2, 
driverMirror);
   }
 
+  @Test

Review Comment:
   Add a test to verify the load from an old dump file. Don't use the current code to 
produce the dump; use a pre-existing old dump directory and verify that the data and 
metadata are loaded properly. This has to be done for both bootstrap and incremental load.



##########
ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/DumpMetaData.java:
##########
@@ -35,33 +40,64 @@
 import java.util.Arrays;
 import java.util.List;
 
+@JsonIgnoreProperties(ignoreUnknown = true)
 public class DumpMetaData {
   // wrapper class for reading and writing metadata about a dump
   // responsible for _dumpmetadata files
   public static final String DUMP_METADATA = "_dumpmetadata";
+
+  // New version of dump metadata file to store top level dumpmetadata content 
in JSON format
+  public static final String DUMP_METADATA_V2 = "_dumpmetadata_v2";
   private static final Logger LOG = 
LoggerFactory.getLogger(DumpMetaData.class);
+  private static ObjectMapper JSON_OBJECT_MAPPER = new ObjectMapper(); // 
Thread-safe.
 
+  @JsonProperty
   private DumpType dumpType;
+  @JsonProperty
   private Long eventFrom = null;
+  @JsonProperty
   private Long eventTo = null;
+  @JsonProperty
   private Path cmRoot;
+  @JsonProperty
   private String payload = null;
-  private ReplScope replScope = null;
-
-  private boolean initialized = false;
-  private final Path dumpFile;
-  private final HiveConf hiveConf;
+  @JsonProperty
   private Long dumpExecutionId;
+  @JsonProperty
   private boolean replScopeModified = false;
+  @JsonProperty
+  private String replScopeStr = null;
+  //Ignore rest of the properties
+  @JsonIgnore
+  private ReplScope replScope = null;
+  @JsonIgnore
+  private Path dumpFile;
+  @JsonIgnore
+  private final HiveConf hiveConf;
+  @JsonIgnore
+  private boolean isTopLevel;
+  @JsonIgnore
+  private Path dumpRoot;
+  @JsonIgnore
+  private boolean initialized = false;
+
+  public DumpMetaData() {
+    //to be instantiated by JSON ObjectMapper.
+    hiveConf = null;
+  }
 
   public DumpMetaData(Path dumpRoot, HiveConf hiveConf) {
-    this.hiveConf = hiveConf;
-    dumpFile = new Path(dumpRoot, DUMP_METADATA);
+    this(dumpRoot, hiveConf, false);
   }
 
+  public DumpMetaData(Path dumpRoot, HiveConf hiveConf, boolean isTopLevel) {

Review Comment:
   Then name it accordingly.



##########
ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java:
##########
@@ -205,7 +205,8 @@ public int execute() {
         if (previousValidHiveDumpPath == null && !isFailover) {
           work.setBootstrap(true);
         } else {
-          work.setOldReplScope(isFailover ? null : new 
DumpMetaData(previousValidHiveDumpPath, conf).getReplScope());
+          work.setOldReplScope(isFailover ? null : new 
DumpMetaData(previousValidHiveDumpPath, conf,
+                  ReplDumpWork.isTopLevelDmd()).getReplScope());

Review Comment:
   This new parameter isTopLevel can be removed — it should always dump as V2. 
If there is any requirement to dump in the old format, use a config to detect that.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to