[ 
https://issues.apache.org/jira/browse/HIVE-24884?focusedWorklogId=776074&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-776074
 ]

ASF GitHub Bot logged work on HIVE-24884:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 31/May/22 02:01
            Start Date: 31/May/22 02:01
    Worklog Time Spent: 10m 
      Work Description: maheshk114 commented on code in PR #3293:
URL: https://github.com/apache/hive/pull/3293#discussion_r885142159


##########
ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/DumpMetaData.java:
##########
@@ -35,33 +40,64 @@
 import java.util.Arrays;
 import java.util.List;
 
+@JsonIgnoreProperties(ignoreUnknown = true)
 public class DumpMetaData {
   // wrapper class for reading and writing metadata about a dump
   // responsible for _dumpmetadata files
   public static final String DUMP_METADATA = "_dumpmetadata";
+
+  // New version of dump metadata file to store top level dumpmetadata content 
in JSON format
+  public static final String DUMP_METADATA_V2 = "_dumpmetadata_v2";
   private static final Logger LOG = 
LoggerFactory.getLogger(DumpMetaData.class);
+  private static ObjectMapper JSON_OBJECT_MAPPER = new ObjectMapper(); // 
Thread-safe.
 
+  @JsonProperty
   private DumpType dumpType;
+  @JsonProperty
   private Long eventFrom = null;
+  @JsonProperty
   private Long eventTo = null;
+  @JsonProperty
   private Path cmRoot;
+  @JsonProperty
   private String payload = null;
-  private ReplScope replScope = null;
-
-  private boolean initialized = false;
-  private final Path dumpFile;
-  private final HiveConf hiveConf;
+  @JsonProperty
   private Long dumpExecutionId;
+  @JsonProperty
   private boolean replScopeModified = false;
+  @JsonProperty
+  private String replScopeStr = null;
+  //Ignore rest of the properties
+  @JsonIgnore
+  private ReplScope replScope = null;
+  @JsonIgnore
+  private Path dumpFile;
+  @JsonIgnore
+  private final HiveConf hiveConf;
+  @JsonIgnore
+  private boolean isTopLevel;
+  @JsonIgnore
+  private Path dumpRoot;
+  @JsonIgnore
+  private boolean initialized = false;
+
+  public DumpMetaData() {
+    //to be instantiated by JSON ObjectMapper.
+    hiveConf = null;
+  }
 
   public DumpMetaData(Path dumpRoot, HiveConf hiveConf) {
-    this.hiveConf = hiveConf;
-    dumpFile = new Path(dumpRoot, DUMP_METADATA);
+    this(dumpRoot, hiveConf, false);

Review Comment:
   Why is this old version of the dump still required?



##########
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java:
##########
@@ -2116,6 +2119,70 @@ public void testIncrementalLoadWithOneFailedDump() 
throws IOException {
     verifyRun("SELECT a from " + replDbName + ".ptned WHERE b=2", ptnData2, 
driverMirror);
   }
 
+  @Test

Review Comment:
   Add a test to verify the load from an old dump file. Don't use the code to 
dump. Use an old dump directory, and verify that the data + metadata are loaded 
properly. This has to be done for both bootstrap and incremental load.



##########
ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/DumpMetaData.java:
##########
@@ -35,33 +40,64 @@
 import java.util.Arrays;
 import java.util.List;
 
+@JsonIgnoreProperties(ignoreUnknown = true)
 public class DumpMetaData {
   // wrapper class for reading and writing metadata about a dump
   // responsible for _dumpmetadata files
   public static final String DUMP_METADATA = "_dumpmetadata";
+
+  // New version of dump metadata file to store top level dumpmetadata content 
in JSON format
+  public static final String DUMP_METADATA_V2 = "_dumpmetadata_v2";
   private static final Logger LOG = 
LoggerFactory.getLogger(DumpMetaData.class);
+  private static ObjectMapper JSON_OBJECT_MAPPER = new ObjectMapper(); // 
Thread-safe.
 
+  @JsonProperty
   private DumpType dumpType;
+  @JsonProperty
   private Long eventFrom = null;
+  @JsonProperty
   private Long eventTo = null;
+  @JsonProperty
   private Path cmRoot;
+  @JsonProperty
   private String payload = null;
-  private ReplScope replScope = null;
-
-  private boolean initialized = false;
-  private final Path dumpFile;
-  private final HiveConf hiveConf;
+  @JsonProperty
   private Long dumpExecutionId;
+  @JsonProperty
   private boolean replScopeModified = false;
+  @JsonProperty
+  private String replScopeStr = null;
+  //Ignore rest of the properties
+  @JsonIgnore
+  private ReplScope replScope = null;
+  @JsonIgnore
+  private Path dumpFile;
+  @JsonIgnore
+  private final HiveConf hiveConf;
+  @JsonIgnore
+  private boolean isTopLevel;
+  @JsonIgnore
+  private Path dumpRoot;
+  @JsonIgnore
+  private boolean initialized = false;
+
+  public DumpMetaData() {
+    //to be instantiated by JSON ObjectMapper.
+    hiveConf = null;
+  }
 
   public DumpMetaData(Path dumpRoot, HiveConf hiveConf) {
-    this.hiveConf = hiveConf;
-    dumpFile = new Path(dumpRoot, DUMP_METADATA);
+    this(dumpRoot, hiveConf, false);
   }
 
+  public DumpMetaData(Path dumpRoot, HiveConf hiveConf, boolean isTopLevel) {

Review Comment:
   Then name it accordingly 



##########
ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java:
##########
@@ -205,7 +205,8 @@ public int execute() {
         if (previousValidHiveDumpPath == null && !isFailover) {
           work.setBootstrap(true);
         } else {
-          work.setOldReplScope(isFailover ? null : new 
DumpMetaData(previousValidHiveDumpPath, conf).getReplScope());
+          work.setOldReplScope(isFailover ? null : new 
DumpMetaData(previousValidHiveDumpPath, conf,
+                  ReplDumpWork.isTopLevelDmd()).getReplScope());

Review Comment:
   This new parameter isTopLevel can be removed. It should always dump as V2. 
If there is any requirement to dump in the old format, use a config to detect that.





Issue Time Tracking
-------------------

    Worklog Id:     (was: 776074)
    Time Spent: 1.5h  (was: 1h 20m)

> Move top level dump metadata content to be in JSON format
> ---------------------------------------------------------
>
>                 Key: HIVE-24884
>                 URL: https://issues.apache.org/jira/browse/HIVE-24884
>             Project: Hive
>          Issue Type: Task
>            Reporter: Pravin Sinha
>            Assignee: Pravin Sinha
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 1.5h
>  Remaining Estimate: 0h
>
> {color:#172b4d}The current content for _dumpmetadata file is TAB separated. 
> This is not very flexible for extension. A more flexible format like JSON 
> based content would be helpful for extending the content.{color}



--
This message was sent by Atlassian Jira
(v8.20.7#820007)

Reply via email to