This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.3.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.3.1 by this push:
     new 1613060a4 add linkis jobhistory unit test (#3515)
1613060a4 is described below

commit 1613060a4ac547839034234dd21e6e677e95b9f5
Author: ruY <[email protected]>
AuthorDate: Mon Oct 10 17:16:36 2022 +0800

    add linkis jobhistory unit test (#3515)
    
    * feat: unit test in jobhistory
---
 .../jobhistory/dao/impl/JobHistoryMapper.xml       |   6 +-
 .../jobhistory/dao/JobHistoryMapperTest.java       | 168 +++++++++++++++++++++
 .../service/JobHistoryQueryServiceTest.java        | 130 ++++++++++++++++
 .../src/test/resources/create.sql                  |  31 +++-
 .../conf/JobhistoryConfigurationTest.scala         |  49 ++++++
 .../linkis/jobhistory/util/QueryConfigTest.scala}  |  38 ++---
 .../linkis/jobhistory/util/QueryUtilsTest.scala    |  60 ++++++++
 7 files changed, 461 insertions(+), 21 deletions(-)

diff --git 
a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml
 
b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml
index 7c0ea434f..6eb48f122 100644
--- 
a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml
+++ 
b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/impl/JobHistoryMapper.xml
@@ -158,7 +158,7 @@
             <if test="instances != null">instances = #{instances},</if>
             <if test="metrics != null">metrics = #{metrics},</if>
             <if test="engineType != null">engine_type = #{engineType},</if>
-            <if test="executionCode != null">and execution_code = 
#{executionCode},</if>
+            <if test="executionCode != null">execution_code = 
#{executionCode},</if>
         </trim>
         <![CDATA[
         WHERE id = #{id} AND updated_time <= #{updateTimeMills}
@@ -167,9 +167,9 @@
 
     <select id="selectJobHistoryStatusForUpdate" flushCache="true" 
resultType="java.lang.String">
         SELECT
-        bdt.`status`
+        bdt.status
         FROM
-        `linkis_ps_job_history_group_history` bdt
+        linkis_ps_job_history_group_history bdt
         WHERE
         id = #{jobId} FOR UPDATE
     </select>
diff --git 
a/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java
 
b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java
new file mode 100644
index 000000000..edf235864
--- /dev/null
+++ 
b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.jobhistory.dao;
+
+import org.apache.linkis.jobhistory.entity.JobHistory;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class JobHistoryMapperTest extends BaseDaoTest {
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(JobHistoryMapperTest.class);
+
+  @Autowired private JobHistoryMapper jobHistoryMapper;
+
+  private JobHistory createJobHistory() {
+    JobHistory jobHistory = new JobHistory();
+    jobHistory.setJobReqId("LINKISCLI_hadoop_spark_1");
+    jobHistory.setSubmitUser("hadoop");
+    jobHistory.setExecuteUser("hadoop");
+    
jobHistory.setSource("{\"scriptPath\":\"LinkisCli\",\"requestIP\":\"127.0.0.1\"}");
+    jobHistory.setLabels(
+        
"{\"userCreator\":\"hadoop-LINKISCLI\",\"engineType\":\"spark-3.0.1\",\"codeType\":\"sql\",\"executeOnce\":\"\"}");
+    jobHistory.setParams(
+        
"{\"configuration\":{\"startup\":{},\"runtime\":{\"hive.resultset.use.unique.column.names\":true,\"wds.linkis.resultSet.store.path\":\"hdfs:///tmp/linkis/hadoop/linkis/20220714_190204/LINKISCLI/3\",\"source\":{\"scriptPath\":\"LinkisCli\",\"requestIP\":\"127.0.0.1\"},\"job\":{\"resultsetIndex\":0,\"#rt_rs_store_path\":\"hdfs:///tmp/linkis/hadoop/linkis/20220714_190204/LINKISCLI/3\"}}},\"variable\":{}}");
+    jobHistory.setParams("1.0");
+    jobHistory.setStatus("Succeed");
+    
jobHistory.setLogPath("hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/3.log");
+    jobHistory.setErrorCode(0);
+    jobHistory.setCreatedTime(new Date());
+    jobHistory.setUpdatedTime(new Date());
+    jobHistory.setInstances("127.0.0.1:9104");
+    jobHistory.setMetrics(
+        
"{\"scheduleTime\":\"2022-07-14T19:02:05+0800\",\"timeToOrchestrator\":\"2022-07-14T19:02:05+0800\",\"submitTime\":\"2022-07-14T19:02:04+0800\",\"yarnResource\":{\"application_1657595967414_0005\":{\"queueMemory\":1073741824,\"queueCores\":1,\"queueInstances\":0,\"jobStatus\":\"COMPLETED\",\"queue\":\"default\"}},\"completeTime\":\"2022-07-14T19:03:08+0800\"}");
+    jobHistory.setEngineType("spark");
+    jobHistory.setExecutionCode("show databases;");
+    
jobHistory.setResultLocation("hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1");
+    return jobHistory;
+  }
+
+  @Test
+  @DisplayName("selectJobHistoryTest")
+  public void selectJobHistoryTest() {
+    JobHistory jobHistory = new JobHistory();
+    jobHistory.setId(1L);
+    List<JobHistory> histories = jobHistoryMapper.selectJobHistory(jobHistory);
+    Assertions.assertTrue(histories.size() > 0);
+  }
+
+  @Test
+  @DisplayName("insertJobHistoryTest")
+  public void insertJobHistoryTest() {
+
+    JobHistory jobHistory = createJobHistory();
+    jobHistoryMapper.insertJobHistory(jobHistory);
+    List<JobHistory> histories = jobHistoryMapper.selectJobHistory(jobHistory);
+    Assertions.assertTrue(histories.size() == 1);
+  }
+
+  @Test
+  @DisplayName("updateJobHistoryTest")
+  public void updateJobHistoryTest() {
+    JobHistory jobHistory = createJobHistory();
+    jobHistory.setId(1L);
+    jobHistoryMapper.updateJobHistory(jobHistory);
+    List<JobHistory> histories = jobHistoryMapper.selectJobHistory(jobHistory);
+    Assertions.assertEquals("LINKISCLI_hadoop_spark_1", 
histories.get(0).getJobReqId());
+  }
+
+  @Test
+  @DisplayName("searchWithIdOrderAscTest")
+  public void searchWithIdOrderAscTest() {
+
+    List<String> status = new ArrayList<>();
+    status.add("Succeed");
+    List<JobHistory> histories =
+        jobHistoryMapper.searchWithIdOrderAsc(1L, "hadoop", status, null, 
null, "spark");
+    Assertions.assertTrue(histories.size() > 0);
+  }
+
+  @Test
+  @DisplayName("searchTest")
+  public void searchTest() {
+
+    List<String> status = new ArrayList<>();
+    status.add("Succeed");
+    List<JobHistory> histories =
+        jobHistoryMapper.search(1L, "hadoop", status, null, null, "spark", 1L);
+    Assertions.assertTrue(histories.size() > 0);
+  }
+
+  @Test
+  @DisplayName("searchWithUserCreatorTest")
+  public void searchWithUserCreatorTest() {
+
+    List<String> status = new ArrayList<>();
+    status.add("Succeed");
+    List<JobHistory> histories =
+        jobHistoryMapper.searchWithUserCreator(
+            1L, "hadoop", null, null, status, null, null, "spark", 1L);
+    Assertions.assertTrue(histories.size() > 0);
+  }
+
+  @Test
+  @DisplayName("searchWithCreatorOnlyTest")
+  public void searchWithCreatorOnlyTest() {
+
+    List<String> status = new ArrayList<>();
+    status.add("Succeed");
+    List<JobHistory> histories =
+        jobHistoryMapper.searchWithCreatorOnly(
+            1L, "hadoop", null, "hadoop", status, null, null, "spark", 1L);
+    Assertions.assertTrue(histories.size() > 0);
+  }
+
+  @Test
+  @DisplayName("countUndoneTaskNoCreatorTest")
+  public void countUndoneTaskNoCreatorTest() {
+    List<String> status = new ArrayList<>();
+    status.add("Succeed");
+    Integer counts =
+        jobHistoryMapper.countUndoneTaskNoCreator("hadoop", status, null, 
null, "spark", 1L);
+    Assertions.assertTrue(counts.intValue() > 0);
+  }
+
+  @Test
+  @DisplayName("countUndoneTaskWithUserCreatorTest")
+  public void countUndoneTaskWithUserCreatorTest() {
+    List<String> status = new ArrayList<>();
+    status.add("Succeed");
+    Integer counts =
+        jobHistoryMapper.countUndoneTaskWithUserCreator(
+            "hadoop", null, "hadoop", status, null, null, "spark", 1L);
+    Assertions.assertTrue(counts.intValue() > 0);
+  }
+
+  @Test
+  @DisplayName("selectJobHistoryStatusForUpdateTest")
+  public void selectJobHistoryStatusForUpdateTest() {
+
+    String status = jobHistoryMapper.selectJobHistoryStatusForUpdate(1L);
+    Assertions.assertEquals("Succeed", status);
+  }
+}
diff --git 
a/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/service/JobHistoryQueryServiceTest.java
 
b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/service/JobHistoryQueryServiceTest.java
new file mode 100644
index 000000000..810c30e85
--- /dev/null
+++ 
b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/service/JobHistoryQueryServiceTest.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.jobhistory.service;
+
+import org.apache.linkis.governance.common.entity.job.JobRequest;
+import org.apache.linkis.governance.common.protocol.job.*;
+import org.apache.linkis.jobhistory.dao.JobHistoryMapper;
+import org.apache.linkis.jobhistory.entity.JobHistory;
+import org.apache.linkis.jobhistory.service.impl.JobHistoryQueryServiceImpl;
+
+import java.util.*;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+public class JobHistoryQueryServiceTest {
+
+  @InjectMocks JobHistoryQueryServiceImpl jobRequestQueryService;
+
+  @Mock JobHistoryMapper jobRequestMapper;
+
+  private JobRequest createJobRequest() {
+    JobRequest jobRequest = new JobRequest();
+    jobRequest.setReqId("LINKISCLI_hadoop_spark_1");
+    jobRequest.setSubmitUser("hadoop");
+    jobRequest.setExecuteUser("hadoop");
+    jobRequest.setSource(new HashMap<>());
+    jobRequest.setLabels(new ArrayList<>());
+    jobRequest.setParams(new HashMap<>());
+    jobRequest.setStatus("Succeed");
+    
jobRequest.setLogPath("hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/3.log");
+    jobRequest.setErrorCode(0);
+    jobRequest.setCreatedTime(new Date());
+    jobRequest.setUpdatedTime(new Date());
+    jobRequest.setInstances("127.0.0.1:9104");
+    jobRequest.setMetrics(new HashMap<>());
+    jobRequest.setExecutionCode("show databases;");
+    
jobRequest.setResultLocation("hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1");
+    return jobRequest;
+  }
+
+  @Test
+  @DisplayName("addTest")
+  public void addTest() {
+    JobReqInsert jobReqInsert = new JobReqInsert(createJobRequest());
+    JobRespProtocol jobRespProtocol = jobRequestQueryService.add(jobReqInsert);
+    Assertions.assertNotNull(jobRespProtocol);
+  }
+
+  @Test
+  @DisplayName("changeTest")
+  public void changeTest() {
+    JobReqUpdate jobReqUpdate = new JobReqUpdate(createJobRequest());
+    JobRespProtocol jobRespProtocol = 
jobRequestQueryService.change(jobReqUpdate);
+    Assertions.assertNotNull(jobRespProtocol);
+  }
+
+  @Test
+  @DisplayName("batchChangeTest")
+  public void batchChangeTest() {
+
+    JobReqBatchUpdate jobReqBatchUpdate =
+        new JobReqBatchUpdate(new 
ArrayList<>(Arrays.asList(createJobRequest())));
+    ArrayList<JobRespProtocol> protocols = 
jobRequestQueryService.batchChange(jobReqBatchUpdate);
+    Assertions.assertTrue(protocols.size() > 0);
+  }
+
+  @Test
+  @DisplayName("queryTest")
+  public void queryTest() {
+    JobReqQuery jobReqQuery = new JobReqQuery(createJobRequest());
+    JobRespProtocol jobRespProtocol = 
jobRequestQueryService.query(jobReqQuery);
+    Assertions.assertNotNull(jobRespProtocol);
+  }
+
+  @Test
+  @DisplayName("getJobHistoryByIdAndNameTest")
+  public void getJobHistoryByIdAndNameTest() {
+    JobHistory history = jobRequestQueryService.getJobHistoryByIdAndName(1L, 
"hadoop");
+    Assertions.assertNull(history);
+  }
+
+  @Test
+  @DisplayName("searchTest")
+  public void searchTest() {
+    List<JobHistory> histories =
+        jobRequestQueryService.search(
+            1L, "hadoop", "hadoop", "Succeed", new Date(), new Date(), 
"spark", 1L);
+    Assertions.assertTrue(histories.size() == 0);
+  }
+
+  @Test
+  @DisplayName("countUndoneTasksTest")
+  public void countUndoneTasksTest() {
+
+    Integer counts =
+        jobRequestQueryService.countUndoneTasks(
+            "hadoop", "hadoop", new Date(), new Date(), "spark", 1L);
+    Assertions.assertTrue(counts.intValue() == 0);
+  }
+
+  @Test
+  @DisplayName("searchOneTest")
+  public void searchOneTest() {
+
+    JobHistory jobHistory = jobRequestQueryService.searchOne(1L, new Date(), 
new Date());
+    Assertions.assertNotNull(jobHistory);
+  }
+}
diff --git 
a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql 
b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
index 64a18a04b..a27c18406 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
+++ b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
@@ -31,4 +31,33 @@ CREATE TABLE IF NOT EXISTS linkis_ps_job_history_detail (
   status varchar(32),
   priority integer,
   PRIMARY KEY (id)
-);
\ No newline at end of file
+);
+
+DROP TABLE IF EXISTS linkis_ps_job_history_group_history CASCADE;
+CREATE TABLE linkis_ps_job_history_group_history (
+  id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary Key, auto increment',
+  job_req_id varchar(64) DEFAULT NULL COMMENT 'job execId',
+  submit_user varchar(50) DEFAULT NULL COMMENT 'who submitted this Job',
+  execute_user varchar(50) DEFAULT NULL COMMENT 'who actually executed this 
Job',
+  source text COMMENT 'job source',
+  labels text COMMENT 'job labels',
+  params text COMMENT 'job params',
+  progress varchar(32) DEFAULT NULL COMMENT 'Job execution progress',
+  status varchar(50) DEFAULT NULL COMMENT 'Script execution status, must be 
one of the following: Inited, WaitForRetry, Scheduled, Running, Succeed, 
Failed, Cancelled, Timeout',
+  log_path varchar(200) DEFAULT NULL COMMENT 'File path of the job log',
+  error_code int(11) DEFAULT NULL COMMENT 'Error code. Generated when the 
execution of the script fails',
+  error_desc varchar(1000) DEFAULT NULL COMMENT 'Execution description. 
Generated when the execution of script fails',
+  created_time datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Creation 
time',
+  updated_time datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Update time',
+  instances varchar(250) DEFAULT NULL COMMENT 'Entrance instances',
+  metrics text COMMENT 'Job Metrics',
+  engine_type varchar(32) DEFAULT NULL COMMENT 'Engine type',
+  execution_code text COMMENT 'Job origin code or code path',
+  result_location varchar(500) DEFAULT NULL COMMENT 'File path of the 
resultsets',
+  PRIMARY KEY (id),
+  KEY created_time (created_time),
+  KEY submit_user (submit_user)
+) ;
+
+INSERT INTO linkis_ps_job_history_group_history 
(job_req_id,submit_user,execute_user,source,labels,params,progress,status,log_path,error_code,error_desc,created_time,updated_time,instances,metrics,engine_type,execution_code,result_location)
 VALUES
+        
('LINKISCLI_hadoop_spark_0','hadoop','hadoop','{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"}','{"userCreator":"hadoop-LINKISCLI","engineType":"spark-3.0.1","codeType":"sql","executeOnce":""}','{"configuration":{"startup":{},"runtime":{"hive.resultset.use.unique.column.names":true,"wds.linkis.resultSet.store.path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1","source":{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"},"job":{"resultsetIndex":0,"#rt_rs_store_path":"
 [...]
diff --git 
a/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/conf/JobhistoryConfigurationTest.scala
 
b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/conf/JobhistoryConfigurationTest.scala
new file mode 100644
index 000000000..df65f8015
--- /dev/null
+++ 
b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/conf/JobhistoryConfigurationTest.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.jobhistory.conf
+
+import org.junit.jupiter.api.{Assertions, DisplayName, Test}
+
class JobhistoryConfigurationTest {

  @Test
  @DisplayName("constTest")
  def constTest(): Unit = {
    // Read each configuration constant and check its declared default value.
    // (Local name fixed: was misspelled "governaceStationAdmin".)
    val governanceStationAdmin = JobhistoryConfiguration.GOVERNANCE_STATION_ADMIN.getValue
    val jobHistorySafeTrigger = JobhistoryConfiguration.JOB_HISTORY_SAFE_TRIGGER
    val entranceSpringName = JobhistoryConfiguration.ENTRANCE_SPRING_NAME.getValue
    // "DELEMITER" spelling matches the production constant's name; do not "fix" it here.
    val entranceInstanceDelemiter = JobhistoryConfiguration.ENTRANCE_INSTANCE_DELEMITER.getValue
    val updateRetryTimes = JobhistoryConfiguration.UPDATE_RETRY_TIMES.getValue
    val updateRetryInterval = JobhistoryConfiguration.UPDATE_RETRY_INTERVAL.getValue
    val undoneJobMinimum = JobhistoryConfiguration.UNDONE_JOB_MINIMUM_ID.getValue
    val undoneJobRefreshTimeDaily = JobhistoryConfiguration.UNDONE_JOB_REFRESH_TIME_DAILY.getValue

    Assertions.assertEquals("hadoop", governanceStationAdmin)
    Assertions.assertTrue(jobHistorySafeTrigger.booleanValue())
    Assertions.assertNotNull(entranceSpringName)
    Assertions.assertNotNull(entranceInstanceDelemiter)

    Assertions.assertEquals(3, updateRetryTimes)
    Assertions.assertEquals(3000, updateRetryInterval)

    Assertions.assertEquals(0L, undoneJobMinimum)
    Assertions.assertNotNull(undoneJobRefreshTimeDaily)
  }

}
diff --git 
a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql 
b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryConfigTest.scala
similarity index 52%
copy from 
linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
copy to 
linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryConfigTest.scala
index 64a18a04b..980623873 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
+++ 
b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryConfigTest.scala
@@ -13,22 +13,26 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
-*/
+ */
 
-SET FOREIGN_KEY_CHECKS=0;
-SET REFERENTIAL_INTEGRITY FALSE;
+package org.apache.linkis.jobhistory.util
 
-DROP TABLE IF EXISTS linkis_ps_job_history_detail CASCADE;
-CREATE TABLE IF NOT EXISTS linkis_ps_job_history_detail (
-  id numeric(20) NOT NULL AUTO_INCREMENT,
-  job_history_id numeric(20) NOT NULL,
-  result_location varchar(500),
-  execution_content text,
-  result_array_size integer,
-  job_group_info text,
-  created_time datetime(3),
-  updated_time datetime(3),
-  status varchar(32),
-  priority integer,
-  PRIMARY KEY (id)
-);
\ No newline at end of file
+import org.junit.jupiter.api.{Assertions, DisplayName, Test}
+
class QueryConfigTest {

  @Test
  @DisplayName("constTest")
  def constTest(): Unit = {
    // Each QueryConfig cache constant must carry its documented default.
    val maxExpireHour = QueryConfig.CACHE_MAX_EXPIRE_HOUR.getValue
    Assertions.assertTrue(maxExpireHour == 1L)

    val dailyExpireEnabled = QueryConfig.CACHE_DAILY_EXPIRE_ENABLED.getValue
    Assertions.assertTrue(dailyExpireEnabled)

    val maxSize = QueryConfig.CACHE_MAX_SIZE.getValue
    Assertions.assertTrue(maxSize == 10000L)

    val cleaningIntervalMinutes = QueryConfig.CACHE_CLEANING_INTERVAL_MINUTE.getValue
    Assertions.assertTrue(cleaningIntervalMinutes == 30)
  }

}
diff --git 
a/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryUtilsTest.scala
 
b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryUtilsTest.scala
new file mode 100644
index 000000000..43b740482
--- /dev/null
+++ 
b/linkis-public-enhancements/linkis-jobhistory/src/test/scala/org/apache/linkis/jobhistory/util/QueryUtilsTest.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.jobhistory.util
+
+import java.util.Date
+
+import org.junit.jupiter.api.{Assertions, DisplayName, Test}
+
class QueryUtilsTest {

  @Test
  @DisplayName("isJobHistoryAdminTest")
  def isJobHistoryAdminTest(): Unit = {
    // "hadoop" is the configured admin; any other user name is not.
    Assertions.assertTrue(QueryUtils.isJobHistoryAdmin("hadoop"))
    Assertions.assertFalse(QueryUtils.isJobHistoryAdmin("hadoops"))
  }

  @Test
  @DisplayName("getJobHistoryAdminTest")
  def getJobHistoryAdminTest(): Unit = {
    // Exactly one admin is configured by default.
    val admins = QueryUtils.getJobHistoryAdmin()
    Assertions.assertTrue(admins.size == 1)
  }

  @Test
  @DisplayName("dateToStringTest")
  def dateToStringTest(): Unit = {
    // Formatting the current date must always produce a non-null string.
    val formatted = QueryUtils.dateToString(new Date)
    Assertions.assertNotNull(formatted)
  }

  @Test
  @DisplayName("checkNameValidTest")
  def checkNameValidTest(): Unit = {
    // A plain alphabetic user name is accepted as valid.
    Assertions.assertTrue(QueryUtils.checkNameValid("hadoops"))
  }

}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to