This is an automated email from the ASF dual-hosted git repository.

dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new e32d69f07fa [improvement](meta) Switch meta serialization to gson 2 
(#36311)
e32d69f07fa is described below

commit e32d69f07fa57b4aebeb3b57f40f60fa9afa5538
Author: Peyz <[email protected]>
AuthorDate: Sat Jun 15 23:04:59 2024 +0800

    [improvement](meta) Switch meta serialization to gson 2 (#36311)
    
    Switch meta serialization to gson
    Contains the following classes:
    
    BatchAlterJobPersistInfo
    BackupJob
    ~~Repository~~
    RestoreJob
    ~~Database~~
    FunctionSearchDesc
    
    Co-authored-by: zhangpeicheng <[email protected]>
---
 .../org/apache/doris/common/FeMetaVersion.java     |   4 +-
 .../doris/alter/BatchAlterJobPersistInfo.java      |  26 +++--
 .../java/org/apache/doris/analysis/TableRef.java   |   4 +
 .../java/org/apache/doris/backup/AbstractJob.java  |  87 ++++++++--------
 .../java/org/apache/doris/backup/BackupJob.java    |  77 ++++----------
 .../apache/doris/backup/RestoreFileMapping.java    |   2 +
 .../java/org/apache/doris/backup/RestoreJob.java   |  99 +++++-------------
 .../java/org/apache/doris/backup/SnapshotInfo.java |  10 ++
 .../apache/doris/catalog/FunctionSearchDesc.java   |  27 +++--
 .../org/apache/doris/persist/gson/GsonUtils.java   | 116 +++++++++++++++++++--
 .../doris/alter/BatchAlterJobPersistInfoTest.java  |  59 +++++++++++
 .../org/apache/doris/backup/BackupJobTest.java     |  35 +++++++
 .../org/apache/doris/backup/RestoreJobTest.java    |  30 ++++++
 .../doris/catalog/FunctionSearchDescTest.java      |  58 +++++++++++
 14 files changed, 432 insertions(+), 202 deletions(-)

diff --git 
a/fe/fe-common/src/main/java/org/apache/doris/common/FeMetaVersion.java 
b/fe/fe-common/src/main/java/org/apache/doris/common/FeMetaVersion.java
index 5f7b7991e00..a54d30d294e 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/FeMetaVersion.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/FeMetaVersion.java
@@ -90,8 +90,10 @@ public final class FeMetaVersion {
     public static final int VERSION_133 = 133;
     // For mate gson
     public static final int VERSION_134 = 134;
+    // For meta gson
+    public static final int VERSION_135 = 135;
     // note: when increment meta version, should assign the latest version to 
VERSION_CURRENT
-    public static final int VERSION_CURRENT = VERSION_134;
+    public static final int VERSION_CURRENT = VERSION_135;
 
     // all logs meta version should >= the minimum version, so that we could 
remove many if clause, for example
     // if (FE_METAVERSION < VERSION_94) ...
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/alter/BatchAlterJobPersistInfo.java 
b/fe/fe-core/src/main/java/org/apache/doris/alter/BatchAlterJobPersistInfo.java
index 3d6caefee50..b9313302c3c 100644
--- 
a/fe/fe-core/src/main/java/org/apache/doris/alter/BatchAlterJobPersistInfo.java
+++ 
b/fe/fe-core/src/main/java/org/apache/doris/alter/BatchAlterJobPersistInfo.java
@@ -17,7 +17,13 @@
 
 package org.apache.doris.alter;
 
+import org.apache.doris.catalog.Env;
+import org.apache.doris.common.FeMetaVersion;
+import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
+import org.apache.doris.persist.gson.GsonUtils;
+
+import com.google.gson.annotations.SerializedName;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -32,6 +38,7 @@ import java.util.List;
  */
 public class BatchAlterJobPersistInfo implements Writable {
 
+    @SerializedName("l")
     private List<AlterJobV2> alterJobV2List;
 
     public BatchAlterJobPersistInfo(List<AlterJobV2> alterJobV2List) {
@@ -40,19 +47,20 @@ public class BatchAlterJobPersistInfo implements Writable {
 
     @Override
     public void write(DataOutput out) throws IOException {
-        out.writeInt(alterJobV2List.size());
-        for (AlterJobV2 alterJobV2 : alterJobV2List) {
-            alterJobV2.write(out);
-        }
+        Text.writeString(out, GsonUtils.GSON.toJson(this));
     }
 
     public static BatchAlterJobPersistInfo read(DataInput in) throws 
IOException {
-        int size = in.readInt();
-        List<AlterJobV2> alterJobV2List = new ArrayList<>();
-        for (int i = 0; i < size; i++) {
-            alterJobV2List.add(AlterJobV2.read(in));
+        if (Env.getCurrentEnvJournalVersion() < FeMetaVersion.VERSION_135) {
+            int size = in.readInt();
+            List<AlterJobV2> alterJobV2List = new ArrayList<>();
+            for (int i = 0; i < size; i++) {
+                alterJobV2List.add(AlterJobV2.read(in));
+            }
+            return new BatchAlterJobPersistInfo(alterJobV2List);
+        } else {
+            return GsonUtils.GSON.fromJson(Text.readString(in), 
BatchAlterJobPersistInfo.class);
         }
-        return new BatchAlterJobPersistInfo(alterJobV2List);
     }
 
     public List<AlterJobV2> getAlterJobV2List() {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java 
b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
index 13821a510c6..1de581834d0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
@@ -40,6 +40,7 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
+import com.google.gson.annotations.SerializedName;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -84,6 +85,7 @@ import java.util.regex.Matcher;
  */
 public class TableRef implements ParseNode, Writable {
     private static final Logger LOG = LogManager.getLogger(TableRef.class);
+    @SerializedName("n")
     protected TableName name;
     // Legal aliases of this table ref. Contains the explicit alias as its 
sole element if
     // there is one. Otherwise, contains the two implicit aliases. Implicit 
aliases are set
@@ -91,6 +93,7 @@ public class TableRef implements ParseNode, Writable {
     // analysis. By convention, for table refs with multiple implicit aliases, 
aliases_[0]
     // contains the fully-qualified implicit alias to ensure that aliases_[0] 
always
     // uniquely identifies this table ref regardless of whether it has an 
explicit alias.
+    @SerializedName("a")
     protected String[] aliases;
     protected List<Long> sampleTabletIds;
     // Indicates whether this table ref is given an explicit alias,
@@ -128,6 +131,7 @@ public class TableRef implements ParseNode, Writable {
     protected List<TupleId> correlatedTupleIds = Lists.newArrayList();
     // analysis output
     protected TupleDescriptor desc;
+    @SerializedName("p")
     private PartitionNames partitionNames = null;
     private ArrayList<String> joinHints;
     private ArrayList<String> sortHints;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/AbstractJob.java 
b/fe/fe-core/src/main/java/org/apache/doris/backup/AbstractJob.java
index 0df9155ab34..d42148f7bc1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/AbstractJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/AbstractJob.java
@@ -18,12 +18,15 @@
 package org.apache.doris.backup;
 
 import org.apache.doris.catalog.Env;
+import org.apache.doris.common.FeMetaVersion;
 import org.apache.doris.common.Pair;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
+import org.apache.doris.persist.gson.GsonUtils;
 
-import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
+import com.google.gson.JsonObject;
+import com.google.gson.annotations.SerializedName;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -43,12 +46,14 @@ public abstract class AbstractJob implements Writable {
         BACKUP, RESTORE
     }
 
+    @SerializedName("t")
     protected JobType type;
 
     // must be set right before job's running
     protected Env env;
     // repo will be set at first run()
     protected Repository repo;
+    @SerializedName("rid")
     protected long repoId;
 
     /*
@@ -57,16 +62,23 @@ public abstract class AbstractJob implements Writable {
      * And each time this method is called, the snapshot tasks will be sent 
with (maybe) different
      * version and version hash. So we have to use different job id to 
identify the tasks in different batches.
      */
+    @SerializedName("jid")
     protected long jobId = -1;
 
+    @SerializedName("l")
     protected String label;
+    @SerializedName("dbid")
     protected long dbId;
+    @SerializedName("dbn")
     protected String dbName;
 
     protected Status status = Status.OK;
 
+    @SerializedName("ct")
     protected long createTime = -1;
+    @SerializedName("ft")
     protected long finishedTime = -1;
+    @SerializedName("to")
     protected long timeoutMs;
 
     // task signature -> <finished num / total num>
@@ -75,6 +87,7 @@ public abstract class AbstractJob implements Writable {
     protected boolean isTypeRead = false;
 
     // save err msg of tasks
+    @SerializedName("msg")
     protected Map<Long, String> taskErrMsg = Maps.newHashMap();
 
     protected AbstractJob(JobType type) {
@@ -158,55 +171,47 @@ public abstract class AbstractJob implements Writable {
     public abstract Status updateRepo(Repository repo);
 
     public static AbstractJob read(DataInput in) throws IOException {
-        AbstractJob job = null;
-        JobType type = JobType.valueOf(Text.readString(in));
-        if (type == JobType.BACKUP) {
-            job = new BackupJob();
-        } else if (type == JobType.RESTORE) {
-            job = new RestoreJob();
+        if (Env.getCurrentEnvJournalVersion() < FeMetaVersion.VERSION_135) {
+            AbstractJob job = null;
+            JobType type = JobType.valueOf(Text.readString(in));
+            if (type == JobType.BACKUP) {
+                job = new BackupJob();
+            } else if (type == JobType.RESTORE) {
+                job = new RestoreJob();
+            } else {
+                throw new IOException("Unknown job type: " + type.name());
+            }
+
+            job.setTypeRead(true);
+            job.readFields(in);
+            return job;
         } else {
-            throw new IOException("Unknown job type: " + type.name());
+            String json = Text.readString(in);
+            JsonObject jsonObject = GsonUtils.GSON.fromJson(json, 
JsonObject.class);
+            JobType type = JobType.valueOf(jsonObject.get("t").getAsString());
+            switch (type) {
+                case BACKUP:
+                    return GsonUtils.GSON.fromJson(json, BackupJob.class);
+                case RESTORE:
+                    return GsonUtils.GSON.fromJson(json, RestoreJob.class);
+                default:
+                    throw new IOException("Unknown job type: " + type.name());
+            }
         }
-
-        job.setTypeRead(true);
-        job.readFields(in);
-        return job;
     }
 
     @Override
     public void write(DataOutput out) throws IOException {
-        // ATTN: must write type first
-        Text.writeString(out, type.name());
-
-        out.writeLong(repoId);
-        Text.writeString(out, label);
-        out.writeLong(jobId);
-        out.writeLong(dbId);
-        Text.writeString(out, dbName);
-
-        out.writeLong(createTime);
-        out.writeLong(finishedTime);
-        out.writeLong(timeoutMs);
-
-        if (!taskErrMsg.isEmpty()) {
-            out.writeBoolean(true);
-            // we only save at most 3 err msgs
-            int savedNum = Math.min(3, taskErrMsg.size());
-            out.writeInt(savedNum);
-            for (Map.Entry<Long, String> entry : taskErrMsg.entrySet()) {
-                if (savedNum == 0) {
-                    break;
-                }
-                out.writeLong(entry.getKey());
-                Text.writeString(out, entry.getValue());
-                savedNum--;
-            }
-            Preconditions.checkState(savedNum == 0, savedNum);
-        } else {
-            out.writeBoolean(false);
+        if (taskErrMsg != null) {
+            Map<Long, String> newMsg = Maps.newHashMap();
+            taskErrMsg.entrySet().stream().limit(3)
+                .forEach(e -> newMsg.put(e.getKey(), e.getValue()));
+            taskErrMsg = newMsg;
         }
+        Text.writeString(out, GsonUtils.GSON.toJson(this));
     }
 
+    @Deprecated
     public void readFields(DataInput in) throws IOException {
         if (!isTypeRead) {
             type = JobType.valueOf(Text.readString(in));
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java 
b/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java
index fc846bf1820..b47cbce1a14 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java
@@ -35,10 +35,12 @@ import org.apache.doris.catalog.Table;
 import org.apache.doris.catalog.Tablet;
 import org.apache.doris.catalog.View;
 import org.apache.doris.common.Config;
+import org.apache.doris.common.FeMetaVersion;
 import org.apache.doris.common.io.Text;
 import org.apache.doris.common.util.TimeUtils;
 import org.apache.doris.datasource.property.S3ClientBEProperties;
 import org.apache.doris.persist.BarrierLog;
+import org.apache.doris.persist.gson.GsonUtils;
 import org.apache.doris.task.AgentBatchTask;
 import org.apache.doris.task.AgentTask;
 import org.apache.doris.task.AgentTaskExecutor;
@@ -53,16 +55,15 @@ import org.apache.doris.thrift.TTaskType;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicates;
-import com.google.common.base.Strings;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.Collections2;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.google.gson.annotations.SerializedName;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.FileVisitOption;
@@ -92,18 +93,24 @@ public class BackupJob extends AbstractJob {
     }
 
     // all objects which need backup
+    @SerializedName("ref")
     private List<TableRef> tableRefs = Lists.newArrayList();
 
+    @SerializedName("st")
     private BackupJobState state;
 
+    @SerializedName("sft")
     private long snapshotFinishedTime = -1;
+    @SerializedName("suft")
     private long snapshotUploadFinishedTime = -1;
 
     // save task id map to the backend it be executed
     private Map<Long, Long> unfinishedTaskIds = Maps.newConcurrentMap();
     // tablet id -> snapshot info
+    @SerializedName("si")
     private Map<Long, SnapshotInfo> snapshotInfos = Maps.newConcurrentMap();
     // save all related table[partition] info
+    @SerializedName("meta")
     private BackupMeta backupMeta;
     // job info file content
     private BackupJobInfo jobInfo;
@@ -112,9 +119,12 @@ public class BackupJob extends AbstractJob {
     // after job is done, this dir should be deleted
     private Path localJobDirPath = null;
     // save the local file path of meta info and job info file
+    @SerializedName("mifp")
     private String localMetaInfoFilePath = null;
+    @SerializedName("jifp")
     private String localJobInfoFilePath = null;
     // backup properties && table commit seq with table id
+    @SerializedName("prop")
     private Map<String, String> properties = Maps.newHashMap();
 
     private byte[] metaInfoBytes = null;
@@ -967,67 +977,16 @@ public class BackupJob extends AbstractJob {
     }
 
     public static BackupJob read(DataInput in) throws IOException {
-        BackupJob job = new BackupJob();
-        job.readFields(in);
-        return job;
-    }
-
-    @Override
-    public void write(DataOutput out) throws IOException {
-        super.write(out);
-
-        // table refs
-        out.writeInt(tableRefs.size());
-        for (TableRef tblRef : tableRefs) {
-            tblRef.write(out);
-        }
-
-        // state
-        Text.writeString(out, state.name());
-
-        // times
-        out.writeLong(snapshotFinishedTime);
-        out.writeLong(snapshotUploadFinishedTime);
-
-        // snapshot info
-        out.writeInt(snapshotInfos.size());
-        for (SnapshotInfo info : snapshotInfos.values()) {
-            info.write(out);
-        }
-
-        // backup meta
-        if (backupMeta == null) {
-            out.writeBoolean(false);
+        if (Env.getCurrentEnvJournalVersion() < FeMetaVersion.VERSION_135) {
+            BackupJob job = new BackupJob();
+            job.readFields(in);
+            return job;
         } else {
-            out.writeBoolean(true);
-            backupMeta.write(out);
-        }
-
-        // No need to persist job info. It is generated then write to file
-
-        // metaInfoFilePath and jobInfoFilePath
-        if (Strings.isNullOrEmpty(localMetaInfoFilePath)) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            Text.writeString(out, localMetaInfoFilePath);
-        }
-
-        if (Strings.isNullOrEmpty(localJobInfoFilePath)) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            Text.writeString(out, localJobInfoFilePath);
-        }
-
-        // write properties
-        out.writeInt(properties.size());
-        for (Map.Entry<String, String> entry : properties.entrySet()) {
-            Text.writeString(out, entry.getKey());
-            Text.writeString(out, entry.getValue());
+            return GsonUtils.GSON.fromJson(Text.readString(in), 
BackupJob.class);
         }
     }
 
+    @Deprecated
     public void readFields(DataInput in) throws IOException {
         super.readFields(in);
 
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreFileMapping.java 
b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreFileMapping.java
index 07ddf6844dc..829367d96a6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreFileMapping.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreFileMapping.java
@@ -22,6 +22,7 @@ import org.apache.doris.common.io.Writable;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
+import com.google.gson.annotations.SerializedName;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -32,6 +33,7 @@ public class RestoreFileMapping implements Writable {
 
     public static class IdChain implements Writable {
         // tblId, partId, idxId, tabletId, replicaId
+        @SerializedName("c")
         private Long[] chain;
 
         private IdChain() {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java 
b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java
index 70a0ed94c17..53f1da8582b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java
@@ -66,6 +66,7 @@ import org.apache.doris.common.util.DynamicPartitionUtil;
 import org.apache.doris.common.util.PropertyAnalyzer;
 import org.apache.doris.common.util.TimeUtils;
 import org.apache.doris.datasource.property.S3ClientBEProperties;
+import org.apache.doris.persist.gson.GsonUtils;
 import org.apache.doris.resource.Tag;
 import org.apache.doris.task.AgentBatchTask;
 import org.apache.doris.task.AgentTask;
@@ -93,11 +94,11 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.Table.Cell;
+import com.google.gson.annotations.SerializedName;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -128,21 +129,31 @@ public class RestoreJob extends AbstractJob {
     }
     // CHECKSTYLE ON
 
+    @SerializedName("bts")
     private String backupTimestamp;
 
+    @SerializedName("j")
     private BackupJobInfo jobInfo;
+    @SerializedName("al")
     private boolean allowLoad;
 
+    @SerializedName("st")
     private RestoreJobState state;
 
+    @SerializedName("meta")
     private BackupMeta backupMeta;
 
+    @SerializedName("fm")
     private RestoreFileMapping fileMapping = new RestoreFileMapping();
 
+    @SerializedName("mpt")
     private long metaPreparedTime = -1;
+    @SerializedName("sft")
     private long snapshotFinishedTime = -1;
+    @SerializedName("dft")
     private long downloadFinishedTime = -1;
 
+    @SerializedName("ra")
     private ReplicaAllocation replicaAlloc;
 
     private boolean reserveReplica = false;
@@ -150,14 +161,19 @@ public class RestoreJob extends AbstractJob {
 
     // this 2 members is to save all newly restored objs
     // tbl name -> part
+    @SerializedName("rp")
     private List<Pair<String, Partition>> restoredPartitions = 
Lists.newArrayList();
+    @SerializedName("rt")
     private List<Table> restoredTbls = Lists.newArrayList();
+    @SerializedName("rr")
     private List<Resource> restoredResources = Lists.newArrayList();
 
     // save all restored partitions' version info which are already exist in 
catalog
     // table id -> partition id -> (version, version hash)
+    @SerializedName("rvi")
     private com.google.common.collect.Table<Long, Long, Long> 
restoredVersionInfo = HashBasedTable.create();
     // tablet id->(be id -> snapshot info)
+    @SerializedName("si")
     private com.google.common.collect.Table<Long, Long, SnapshotInfo> 
snapshotInfos = HashBasedTable.create();
 
     private Map<Long, Long> unfinishedSignatureToId = Maps.newConcurrentMap();
@@ -174,6 +190,7 @@ public class RestoreJob extends AbstractJob {
     private boolean isBeingSynced = false;
 
     // restore properties
+    @SerializedName("prop")
     private Map<String, String> properties = Maps.newHashMap();
 
     public RestoreJob() {
@@ -2073,84 +2090,16 @@ public class RestoreJob extends AbstractJob {
     }
 
     public static RestoreJob read(DataInput in) throws IOException {
-        RestoreJob job = new RestoreJob();
-        job.readFields(in);
-        return job;
-    }
-
-    @Override
-    public void write(DataOutput out) throws IOException {
-        super.write(out);
-
-        Text.writeString(out, backupTimestamp);
-        jobInfo.write(out);
-        out.writeBoolean(allowLoad);
-
-        Text.writeString(out, state.name());
-
-        if (backupMeta != null) {
-            out.writeBoolean(true);
-            backupMeta.write(out);
+        if (Env.getCurrentEnvJournalVersion() < FeMetaVersion.VERSION_135) {
+            RestoreJob job = new RestoreJob();
+            job.readFields(in);
+            return job;
         } else {
-            out.writeBoolean(false);
-        }
-
-        fileMapping.write(out);
-
-        out.writeLong(metaPreparedTime);
-        out.writeLong(snapshotFinishedTime);
-        out.writeLong(downloadFinishedTime);
-
-        replicaAlloc.write(out);
-
-        out.writeInt(restoredPartitions.size());
-        for (Pair<String, Partition> entry : restoredPartitions) {
-            Text.writeString(out, entry.first);
-            entry.second.write(out);
-        }
-
-        out.writeInt(restoredTbls.size());
-        for (Table tbl : restoredTbls) {
-            tbl.write(out);
-        }
-
-        out.writeInt(restoredVersionInfo.rowKeySet().size());
-        for (long tblId : restoredVersionInfo.rowKeySet()) {
-            out.writeLong(tblId);
-            out.writeInt(restoredVersionInfo.row(tblId).size());
-            for (Map.Entry<Long, Long> entry : 
restoredVersionInfo.row(tblId).entrySet()) {
-                out.writeLong(entry.getKey());
-                out.writeLong(entry.getValue());
-                // It is version hash in the past,
-                // but it useless but should compatible with old version so 
that write 0 here
-                out.writeLong(0L);
-            }
-        }
-
-        out.writeInt(snapshotInfos.rowKeySet().size());
-        for (long tabletId : snapshotInfos.rowKeySet()) {
-            out.writeLong(tabletId);
-            Map<Long, SnapshotInfo> map = snapshotInfos.row(tabletId);
-            out.writeInt(map.size());
-            for (Map.Entry<Long, SnapshotInfo> entry : map.entrySet()) {
-                out.writeLong(entry.getKey());
-                entry.getValue().write(out);
-            }
-        }
-
-        out.writeInt(restoredResources.size());
-        for (Resource resource : restoredResources) {
-            resource.write(out);
-        }
-
-        // write properties
-        out.writeInt(properties.size());
-        for (Map.Entry<String, String> entry : properties.entrySet()) {
-            Text.writeString(out, entry.getKey());
-            Text.writeString(out, entry.getValue());
+            return GsonUtils.GSON.fromJson(Text.readString(in), 
RestoreJob.class);
         }
     }
 
+    @Deprecated
     @Override
     public void readFields(DataInput in) throws IOException {
         super.readFields(in);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/SnapshotInfo.java 
b/fe/fe-core/src/main/java/org/apache/doris/backup/SnapshotInfo.java
index 60847e822dc..38e91c42200 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/SnapshotInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/SnapshotInfo.java
@@ -22,6 +22,7 @@ import org.apache.doris.common.io.Writable;
 
 import com.google.common.base.Joiner;
 import com.google.common.collect.Lists;
+import com.google.gson.annotations.SerializedName;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -29,19 +30,28 @@ import java.io.IOException;
 import java.util.List;
 
 public class SnapshotInfo implements Writable {
+    @SerializedName("db")
     private long dbId;
+    @SerializedName("tbl")
     private long tblId;
+    @SerializedName("p")
     private long partitionId;
+    @SerializedName("ind")
     private long indexId;
+    @SerializedName("tab")
     private long tabletId;
+    @SerializedName("be")
     private long beId;
+    @SerializedName("sh")
     private int schemaHash;
     // eg: /path/to/your/be/data/snapshot/20180410102311.0.86400/
+    @SerializedName("path")
     private String path;
     // eg:
     // 10006_0_1_0_0.dat
     // 10006_2_2_0_0.idx
     // 10006.hdr
+    @SerializedName("f")
     private List<String> files = Lists.newArrayList();
 
     public SnapshotInfo() {
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSearchDesc.java 
b/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSearchDesc.java
index 3eed1ab4996..6e87e83b649 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSearchDesc.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSearchDesc.java
@@ -18,7 +18,12 @@
 package org.apache.doris.catalog;
 
 import org.apache.doris.analysis.FunctionName;
+import org.apache.doris.common.FeMetaVersion;
+import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
+import org.apache.doris.persist.gson.GsonUtils;
+
+import com.google.gson.annotations.SerializedName;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -26,8 +31,11 @@ import java.io.IOException;
 
 // Used to search a function
 public class FunctionSearchDesc implements Writable {
+    @SerializedName("n")
     private FunctionName name;
+    @SerializedName("t")
     private Type[] argTypes;
+    @SerializedName("isV")
     private boolean isVariadic;
 
     private FunctionSearchDesc() {}
@@ -93,15 +101,10 @@ public class FunctionSearchDesc implements Writable {
 
     @Override
     public void write(DataOutput out) throws IOException {
-        name.write(out);
-        // write args
-        out.writeShort(argTypes.length);
-        for (Type type : argTypes) {
-            ColumnType.write(out, type);
-        }
-        out.writeBoolean(isVariadic);
+        Text.writeString(out, GsonUtils.GSON.toJson(this));
     }
 
+    @Deprecated
     public void readFields(DataInput in) throws IOException {
         name = FunctionName.read(in);
         // read args
@@ -114,8 +117,12 @@ public class FunctionSearchDesc implements Writable {
     }
 
     public static FunctionSearchDesc read(DataInput input) throws IOException {
-        FunctionSearchDesc function = new FunctionSearchDesc();
-        function.readFields(input);
-        return function;
+        if (Env.getCurrentEnvJournalVersion() < FeMetaVersion.VERSION_135) {
+            FunctionSearchDesc function = new FunctionSearchDesc();
+            function.readFields(input);
+            return function;
+        } else {
+            return GsonUtils.GSON.fromJson(Text.readString(input), 
FunctionSearchDesc.class);
+        }
     }
 }
diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java 
b/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
index 482118bb4ee..a2fae7ad35a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
@@ -58,19 +58,35 @@ import org.apache.doris.analysis.StringLiteral;
 import org.apache.doris.analysis.StructLiteral;
 import org.apache.doris.analysis.TimestampArithmeticExpr;
 import org.apache.doris.analysis.VirtualSlotRef;
+import org.apache.doris.backup.BackupJob;
+import org.apache.doris.backup.RestoreJob;
 import org.apache.doris.catalog.AggStateType;
+import org.apache.doris.catalog.AnyElementType;
+import org.apache.doris.catalog.AnyStructType;
+import org.apache.doris.catalog.AnyType;
 import org.apache.doris.catalog.ArrayType;
+import org.apache.doris.catalog.BrokerTable;
 import org.apache.doris.catalog.DatabaseIf;
 import org.apache.doris.catalog.DistributionInfo;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.EsResource;
+import org.apache.doris.catalog.EsTable;
+import org.apache.doris.catalog.FunctionGenTable;
 import org.apache.doris.catalog.HMSResource;
 import org.apache.doris.catalog.HashDistributionInfo;
 import org.apache.doris.catalog.HdfsResource;
+import org.apache.doris.catalog.HiveTable;
+import org.apache.doris.catalog.InlineView;
 import org.apache.doris.catalog.JdbcResource;
+import org.apache.doris.catalog.JdbcTable;
 import org.apache.doris.catalog.ListPartitionInfo;
+import org.apache.doris.catalog.MTMV;
 import org.apache.doris.catalog.MapType;
+import org.apache.doris.catalog.MultiRowType;
+import org.apache.doris.catalog.MysqlDBTable;
+import org.apache.doris.catalog.MysqlTable;
 import org.apache.doris.catalog.OdbcCatalogResource;
+import org.apache.doris.catalog.OdbcTable;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.Partition;
 import org.apache.doris.catalog.PartitionInfo;
@@ -81,11 +97,15 @@ import org.apache.doris.catalog.Replica;
 import org.apache.doris.catalog.Resource;
 import org.apache.doris.catalog.S3Resource;
 import org.apache.doris.catalog.ScalarType;
+import org.apache.doris.catalog.SchemaTable;
 import org.apache.doris.catalog.SinglePartitionInfo;
 import org.apache.doris.catalog.SparkResource;
 import org.apache.doris.catalog.StructType;
 import org.apache.doris.catalog.TableIf;
 import org.apache.doris.catalog.Tablet;
+import org.apache.doris.catalog.TemplateType;
+import org.apache.doris.catalog.VariantType;
+import org.apache.doris.catalog.View;
 import org.apache.doris.catalog.constraint.Constraint;
 import org.apache.doris.catalog.constraint.ForeignKeyConstraint;
 import org.apache.doris.catalog.constraint.PrimaryKeyConstraint;
@@ -139,7 +159,13 @@ import org.apache.doris.datasource.test.TestExternalTable;
 import org.apache.doris.datasource.trinoconnector.TrinoConnectorExternalCatalog;
 import org.apache.doris.datasource.trinoconnector.TrinoConnectorExternalDatabase;
 import org.apache.doris.datasource.trinoconnector.TrinoConnectorExternalTable;
-import org.apache.doris.job.base.AbstractJob;
+import org.apache.doris.fs.remote.BrokerFileSystem;
+import org.apache.doris.fs.remote.ObjFileSystem;
+import org.apache.doris.fs.remote.RemoteFileSystem;
+import org.apache.doris.fs.remote.S3FileSystem;
+import org.apache.doris.fs.remote.dfs.DFSFileSystem;
+import org.apache.doris.fs.remote.dfs.JFSFileSystem;
+import org.apache.doris.fs.remote.dfs.OFSFileSystem;
 import org.apache.doris.job.extensions.insert.InsertJob;
 import org.apache.doris.job.extensions.mtmv.MTMVJob;
 import org.apache.doris.load.loadv2.LoadJob.LoadJobStateUpdateInfo;
@@ -236,7 +262,13 @@ public class GsonUtils {
             .registerSubtype(ArrayType.class, ArrayType.class.getSimpleName())
             .registerSubtype(MapType.class, MapType.class.getSimpleName())
             .registerSubtype(StructType.class, StructType.class.getSimpleName())
-            .registerSubtype(AggStateType.class, AggStateType.class.getSimpleName());
+            .registerSubtype(AggStateType.class, AggStateType.class.getSimpleName())
+            .registerSubtype(AnyElementType.class, AnyElementType.class.getSimpleName())
+            .registerSubtype(AnyStructType.class, AnyStructType.class.getSimpleName())
+            .registerSubtype(AnyType.class, AnyType.class.getSimpleName())
+            .registerSubtype(MultiRowType.class, MultiRowType.class.getSimpleName())
+            .registerSubtype(TemplateType.class, TemplateType.class.getSimpleName())
+            .registerSubtype(VariantType.class, VariantType.class.getSimpleName());
 
     // runtime adapter for class "Expr"
     private static final RuntimeTypeAdapterFactory<org.apache.doris.analysis.Expr> exprAdapterFactory
@@ -373,10 +405,11 @@ public class GsonUtils {
             RuntimeTypeAdapterFactory.of(
                             AbstractDataSourceProperties.class, "clazz")
                     .registerSubtype(KafkaDataSourceProperties.class, KafkaDataSourceProperties.class.getSimpleName());
-    private static RuntimeTypeAdapterFactory<AbstractJob> jobExecutorRuntimeTypeAdapterFactory =
-            RuntimeTypeAdapterFactory.of(AbstractJob.class, "clazz")
-                    .registerSubtype(InsertJob.class, InsertJob.class.getSimpleName())
-                    .registerSubtype(MTMVJob.class, MTMVJob.class.getSimpleName());
+    private static RuntimeTypeAdapterFactory<org.apache.doris.job.base.AbstractJob>
+            jobExecutorRuntimeTypeAdapterFactory
+                    = RuntimeTypeAdapterFactory.of(org.apache.doris.job.base.AbstractJob.class, "clazz")
+                            .registerSubtype(InsertJob.class, InsertJob.class.getSimpleName())
+                            .registerSubtype(MTMVJob.class, MTMVJob.class.getSimpleName());
 
     private static RuntimeTypeAdapterFactory<MTMVSnapshotIf> mtmvSnapshotTypeAdapterFactory =
             RuntimeTypeAdapterFactory.of(MTMVSnapshotIf.class, "clazz")
@@ -412,7 +445,19 @@ public class GsonUtils {
             .registerSubtype(ExternalInfoSchemaTable.class, ExternalInfoSchemaTable.class.getSimpleName())
             .registerSubtype(ExternalMysqlTable.class, ExternalMysqlTable.class.getSimpleName())
             .registerSubtype(TrinoConnectorExternalTable.class, TrinoConnectorExternalTable.class.getSimpleName())
-            .registerSubtype(TestExternalTable.class, TestExternalTable.class.getSimpleName());
+            .registerSubtype(TestExternalTable.class, TestExternalTable.class.getSimpleName())
+            .registerSubtype(BrokerTable.class, BrokerTable.class.getSimpleName())
+            .registerSubtype(EsTable.class, EsTable.class.getSimpleName())
+            .registerSubtype(FunctionGenTable.class, FunctionGenTable.class.getSimpleName())
+            .registerSubtype(HiveTable.class, HiveTable.class.getSimpleName())
+            .registerSubtype(InlineView.class, InlineView.class.getSimpleName())
+            .registerSubtype(JdbcTable.class, JdbcTable.class.getSimpleName())
+            .registerSubtype(MTMV.class, MTMV.class.getSimpleName())
+            .registerSubtype(MysqlDBTable.class, MysqlDBTable.class.getSimpleName())
+            .registerSubtype(MysqlTable.class, MysqlTable.class.getSimpleName())
+            .registerSubtype(OdbcTable.class, OdbcTable.class.getSimpleName())
+            .registerSubtype(SchemaTable.class, SchemaTable.class.getSimpleName())
+            .registerSubtype(View.class, View.class.getSimpleName());
 
     // runtime adapter for class "PartitionInfo"
     private static RuntimeTypeAdapterFactory<PartitionInfo> partitionInfoTypeAdapterFactory
@@ -471,6 +516,59 @@ public class GsonUtils {
             .registerDefaultSubtype(RoutineLoadProgress.class)
             .registerSubtype(KafkaProgress.class, KafkaProgress.class.getSimpleName());
 
+    private static RuntimeTypeAdapterFactory<RemoteFileSystem> remoteFileSystemTypeAdapterFactory
+            = RuntimeTypeAdapterFactory.of(RemoteFileSystem.class, "clazz")
+            .registerSubtype(BrokerFileSystem.class, BrokerFileSystem.class.getSimpleName())
+            .registerSubtype(DFSFileSystem.class, DFSFileSystem.class.getSimpleName())
+            .registerSubtype(JFSFileSystem.class, JFSFileSystem.class.getSimpleName())
+            .registerSubtype(OFSFileSystem.class, OFSFileSystem.class.getSimpleName())
+            .registerSubtype(ObjFileSystem.class, ObjFileSystem.class.getSimpleName())
+            .registerSubtype(S3FileSystem.class, S3FileSystem.class.getSimpleName());
+
+    private static RuntimeTypeAdapterFactory<org.apache.doris.backup.AbstractJob>
+            jobBackupTypeAdapterFactory
+                    = RuntimeTypeAdapterFactory.of(org.apache.doris.backup.AbstractJob.class, "clazz")
+                    .registerSubtype(BackupJob.class, BackupJob.class.getSimpleName())
+                    .registerSubtype(RestoreJob.class, RestoreJob.class.getSimpleName());
+
+    private static RuntimeTypeAdapterFactory<org.apache.doris.catalog.Table>
+            tableTypeAdapterFactory
+                     = RuntimeTypeAdapterFactory.of(org.apache.doris.catalog.Table.class, "table")
+                    .registerSubtype(OlapTable.class, OlapTable.class.getSimpleName())
+                    .registerSubtype(BrokerTable.class, BrokerTable.class.getSimpleName())
+                    .registerSubtype(EsTable.class, EsTable.class.getSimpleName())
+                    .registerSubtype(FunctionGenTable.class, FunctionGenTable.class.getSimpleName())
+                    .registerSubtype(HiveTable.class, HiveTable.class.getSimpleName())
+                    .registerSubtype(InlineView.class, InlineView.class.getSimpleName())
+                    .registerSubtype(JdbcTable.class, JdbcTable.class.getSimpleName())
+                    .registerSubtype(MTMV.class, MTMV.class.getSimpleName())
+                    .registerSubtype(MysqlDBTable.class, MysqlDBTable.class.getSimpleName())
+                    .registerSubtype(MysqlTable.class, MysqlTable.class.getSimpleName())
+                    .registerSubtype(OdbcTable.class, OdbcTable.class.getSimpleName())
+                    .registerSubtype(SchemaTable.class, SchemaTable.class.getSimpleName())
+                    .registerSubtype(View.class, View.class.getSimpleName());
+
+    private static RuntimeTypeAdapterFactory<org.apache.doris.analysis.LiteralExpr>
+            literalExprAdapterFactory
+                    = RuntimeTypeAdapterFactory.of(org.apache.doris.analysis.LiteralExpr.class, "literalExpr")
+                    .registerSubtype(BoolLiteral.class, BoolLiteral.class.getSimpleName())
+                    .registerSubtype(MaxLiteral.class, MaxLiteral.class.getSimpleName())
+                    .registerSubtype(StringLiteral.class, StringLiteral.class.getSimpleName())
+                    .registerSubtype(IntLiteral.class, IntLiteral.class.getSimpleName())
+                    .registerSubtype(LargeIntLiteral.class, LargeIntLiteral.class.getSimpleName())
+                    .registerSubtype(DecimalLiteral.class, DecimalLiteral.class.getSimpleName())
+                    .registerSubtype(FloatLiteral.class, FloatLiteral.class.getSimpleName())
+                    .registerSubtype(NullLiteral.class, NullLiteral.class.getSimpleName())
+                    .registerSubtype(MapLiteral.class, MapLiteral.class.getSimpleName())
+                    .registerSubtype(DateLiteral.class, DateLiteral.class.getSimpleName())
+                    .registerSubtype(IPv6Literal.class, IPv6Literal.class.getSimpleName())
+                    .registerSubtype(IPv4Literal.class, IPv4Literal.class.getSimpleName())
+                    .registerSubtype(JsonLiteral.class, JsonLiteral.class.getSimpleName())
+                    .registerSubtype(ArrayLiteral.class, ArrayLiteral.class.getSimpleName())
+                    .registerSubtype(StructLiteral.class, StructLiteral.class.getSimpleName())
+                    .registerSubtype(NumericLiteralExpr.class, NumericLiteralExpr.class.getSimpleName())
+                    .registerSubtype(PlaceHolderExpr.class, PlaceHolderExpr.class.getSimpleName());
+
     // the builder of GSON instance.
     // Add any other adapters if necessary.
     private static final GsonBuilder GSON_BUILDER = new GsonBuilder().addSerializationExclusionStrategy(
@@ -501,6 +599,10 @@ public class GsonUtils {
             .registerTypeAdapterFactory(constraintTypeAdapterFactory)
             .registerTypeAdapterFactory(txnCommitAttachmentTypeAdapterFactory)
             .registerTypeAdapterFactory(routineLoadTypeAdapterFactory)
+            .registerTypeAdapterFactory(remoteFileSystemTypeAdapterFactory)
+            .registerTypeAdapterFactory(jobBackupTypeAdapterFactory)
+            .registerTypeAdapterFactory(tableTypeAdapterFactory)
+            .registerTypeAdapterFactory(literalExprAdapterFactory)
             .registerTypeAdapter(ImmutableMap.class, new ImmutableMapDeserializer())
             .registerTypeAdapter(AtomicBoolean.class, new AtomicBooleanAdapter())
             .registerTypeAdapter(PartitionKey.class, new PartitionKey.PartitionKeySerializer())
diff --git a/fe/fe-core/src/test/java/org/apache/doris/alter/BatchAlterJobPersistInfoTest.java b/fe/fe-core/src/test/java/org/apache/doris/alter/BatchAlterJobPersistInfoTest.java
new file mode 100644
index 00000000000..69dacb48c20
--- /dev/null
+++ b/fe/fe-core/src/test/java/org/apache/doris/alter/BatchAlterJobPersistInfoTest.java
@@ -0,0 +1,59 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.alter;
+
+import org.apache.doris.common.AnalysisException;
+
+import com.google.common.collect.Lists;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+
+public class BatchAlterJobPersistInfoTest {
+    @Test
+    public void testSerialization() throws IOException, AnalysisException {
+        // 1. Write objects to file
+        final Path path = Files.createTempFile("batchAlterJobPersistInfo", "tmp");
+        DataOutputStream out = new DataOutputStream(Files.newOutputStream(path));
+
+        List<AlterJobV2> alterJobV2s = Lists.newArrayList();
+        alterJobV2s.add(new RollupJobV2());
+        BatchAlterJobPersistInfo info = new BatchAlterJobPersistInfo(alterJobV2s);
+
+        info.write(out);
+        out.flush();
+        out.close();
+
+        // 2. Read objects from file
+        DataInputStream in = new DataInputStream(Files.newInputStream(path));
+
+        BatchAlterJobPersistInfo info2 = BatchAlterJobPersistInfo.read(in);
+
+        Assert.assertEquals(info.getAlterJobV2List().get(0).getType(), info2.getAlterJobV2List().get(0).getType());
+
+        // 3. delete files
+        in.close();
+        Files.delete(path);
+    }
+}
diff --git a/fe/fe-core/src/test/java/org/apache/doris/backup/BackupJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/backup/BackupJobTest.java
index 4e0eecda1fa..68f02e8eee8 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/backup/BackupJobTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/backup/BackupJobTest.java
@@ -26,6 +26,7 @@ import org.apache.doris.catalog.Database;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.FsBroker;
 import org.apache.doris.catalog.OlapTable;
+import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.FeConstants;
 import org.apache.doris.common.jmockit.Deencapsulation;
@@ -58,6 +59,8 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.FileVisitOption;
@@ -353,4 +356,36 @@ public class BackupJobTest {
         Assert.assertEquals(Status.ErrCode.NOT_FOUND, job.getStatus().getErrCode());
         Assert.assertEquals(BackupJobState.CANCELLED, job.getState());
     }
+
+    @Test
+    public void testSerialization() throws IOException, AnalysisException {
+        // 1. Write objects to file
+        final Path path = Files.createTempFile("backupJob", "tmp");
+        DataOutputStream out = new DataOutputStream(Files.newOutputStream(path));
+
+        List<TableRef> tableRefs = Lists.newArrayList();
+        tableRefs.add(
+                new TableRef(new TableName(InternalCatalog.INTERNAL_CATALOG_NAME, UnitTestUtil.DB_NAME, UnitTestUtil.TABLE_NAME),
+                        null));
+        job = new BackupJob("label", dbId, UnitTestUtil.DB_NAME, tableRefs, 13600 * 1000, BackupStmt.BackupContent.ALL,
+            env, repo.getId());
+
+        job.write(out);
+        out.flush();
+        out.close();
+
+        // 2. Read objects from file
+        DataInputStream in = new DataInputStream(Files.newInputStream(path));
+
+        BackupJob job2 = BackupJob.read(in);
+
+        Assert.assertEquals(job.getJobId(), job2.getJobId());
+        Assert.assertEquals(job.getDbId(), job2.getDbId());
+        Assert.assertEquals(job.getCreateTime(), job2.getCreateTime());
+        Assert.assertEquals(job.getType(), job2.getType());
+
+        // 3. delete files
+        in.close();
+        Files.delete(path);
+    }
 }
diff --git a/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java
index ff60a6e8b90..71cad0438c9 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java
@@ -51,9 +51,15 @@ import mockit.Injectable;
 import mockit.Mock;
 import mockit.MockUp;
 import mockit.Mocked;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
@@ -276,4 +282,28 @@ public class RestoreJobTest {
         System.out.println("tbl signature: " + tbl.getSignature(BackupHandler.SIGNATURE_VERSION, partNames));
     }
 
+    @Test
+    public void testSerialization() throws IOException, AnalysisException {
+        // 1. Write objects to file
+        final Path path = Files.createTempFile("restoreJob", "tmp");
+        DataOutputStream out = new DataOutputStream(Files.newOutputStream(path));
+
+        job.write(out);
+        out.flush();
+        out.close();
+
+        // 2. Read objects from file
+        DataInputStream in = new DataInputStream(Files.newInputStream(path));
+
+        RestoreJob job2 = RestoreJob.read(in);
+
+        Assert.assertEquals(job.getJobId(), job2.getJobId());
+        Assert.assertEquals(job.getDbId(), job2.getDbId());
+        Assert.assertEquals(job.getCreateTime(), job2.getCreateTime());
+        Assert.assertEquals(job.getType(), job2.getType());
+
+        // 3. delete files
+        in.close();
+        Files.delete(path);
+    }
 }
diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/FunctionSearchDescTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/FunctionSearchDescTest.java
new file mode 100644
index 00000000000..9df24c5ff9d
--- /dev/null
+++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/FunctionSearchDescTest.java
@@ -0,0 +1,58 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.catalog;
+
+import org.apache.doris.analysis.FunctionName;
+import org.apache.doris.common.AnalysisException;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+public class FunctionSearchDescTest {
+    @Test
+    public void testSerialization() throws IOException, AnalysisException {
+        // 1. Write objects to file
+        final Path path = Files.createTempFile("functionSearchDesc", "tmp");
+        DataOutputStream out = new DataOutputStream(Files.newOutputStream(path));
+
+        FunctionSearchDesc desc = new FunctionSearchDesc(new FunctionName("test"), new Type[]{Type.INT, Type.INT}, false);
+        desc.write(out);
+        out.flush();
+        out.close();
+
+        // 2. Read objects from file
+        DataInputStream in = new DataInputStream(Files.newInputStream(path));
+
+        FunctionSearchDesc desc2 = FunctionSearchDesc.read(in);
+
+        Assert.assertEquals(desc.getName(), desc2.getName());
+        Assert.assertNotNull(desc.getArgTypes());
+        Assert.assertNotNull(desc2.getArgTypes());
+        Assert.assertEquals(desc.getArgTypes()[0], desc2.getArgTypes()[0]);
+
+        // 3. delete files
+        in.close();
+        Files.delete(path);
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to