This is an automated email from the ASF dual-hosted git repository.

klesh pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git


The following commit(s) were added to refs/heads/main by this push:
     new 84efcdcb2 feat(customize): add CSV import functionality for sprints 
and issue c… (#8456)
84efcdcb2 is described below

commit 84efcdcb2d2dc74d7f7b121e7696e9a3543b10d2
Author: NaRro <cong.w...@merico.dev>
AuthorDate: Fri Jul 18 05:35:12 2025 +0000

    feat(customize): add CSV import functionality for sprints and issue c… 
(#8456)
    
    * feat(customize): add CSV import functionality for sprints and issue 
changelogs/worklogs
    
    - Add new API endpoints for importing sprints, issue changelogs, and issue 
worklogs from CSV files
    - Implement corresponding handler functions to process the uploaded CSV 
files
    - Add e2e tests to verify the import functionality for sprints, issue 
changelogs, and issue worklogs
    - Update the plugin's ApiResources map to include the new endpoints
    
    #8446
    
    * fix(customize): delete board_sprints before import sprints
---
 backend/plugins/customize/api/csv_issue.go         |  93 ++++++++++++
 .../customize/e2e/import_issue_changelogs_test.go  | 101 +++++++++++++
 .../customize/e2e/import_issue_worklogs_test.go    | 101 +++++++++++++
 .../plugins/customize/e2e/import_issues_test.go    |  10 ++
 .../plugins/customize/e2e/import_sprint_test.go    |  85 +++++++++++
 .../customize/e2e/raw_tables/issue_changelogs.csv  |   7 +
 .../raw_tables/issue_changelogs_incremental.csv    |   5 +
 .../customize/e2e/raw_tables/issue_worklogs.csv    |   4 +
 .../e2e/raw_tables/issue_worklogs_incremental.csv  |   3 +
 .../customize/e2e/raw_tables/issues_input.csv      |   8 +-
 .../e2e/raw_tables/issues_input_incremental.csv    |   8 +-
 .../plugins/customize/e2e/raw_tables/sprints.csv   |   3 +
 .../e2e/raw_tables/sprints_incremental.csv         |   2 +
 .../accounts_from_issue_changelogs.csv             |   6 +
 .../accounts_from_issue_worklogs.csv               |   4 +
 .../e2e/snapshot_tables/board_sprints.csv          |   4 +
 .../e2e/snapshot_tables/issue_changelogs.csv       |   7 +
 .../issue_changelogs_incremental.csv               |  11 ++
 .../e2e/snapshot_tables/issue_worklogs.csv         |   4 +
 .../snapshot_tables/issue_worklogs_incremental.csv |   5 +
 .../e2e/snapshot_tables/sprint_issues.csv          |   8 +
 .../customize/e2e/snapshot_tables/sprints.csv      |   4 +
 backend/plugins/customize/impl/impl.go             |   9 ++
 backend/plugins/customize/service/service.go       | 162 ++++++++++++++++++++-
 24 files changed, 645 insertions(+), 9 deletions(-)

diff --git a/backend/plugins/customize/api/csv_issue.go 
b/backend/plugins/customize/api/csv_issue.go
index 51ea626c8..2e9aaddd9 100644
--- a/backend/plugins/customize/api/csv_issue.go
+++ b/backend/plugins/customize/api/csv_issue.go
@@ -124,6 +124,99 @@ func (h *Handlers) ImportIssueRepoCommit(input 
*plugin.ApiResourceInput) (*plugi
        return nil, h.svc.ImportIssueRepoCommit(boardId, file, incremental)
 }
 
+// ImportSprint accepts a CSV file, parses and saves it to the database
+// @Summary      Upload sprints.csv file
+// @Description  Upload sprints.csv file
+// @Tags                plugins/customize
+// @Accept       multipart/form-data
+// @Param        boardId formData string true "the ID of the board"
+// @Param        file formData file true "select file to upload"
+// @Param        incremental formData boolean false "whether to save only new data" default(false)
+// @Produce      json
+// @Success      200
+// @Failure 400  {object} shared.ApiBody "Bad Request"
+// @Failure 500  {object} shared.ApiBody "Internal Error"
+// @Router       /plugins/customize/csvfiles/sprints.csv [post]
+func (h *Handlers) ImportSprint(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
+       file, err := h.extractFile(input)
+       if err != nil {
+               return nil, err
+       }
+       // nolint
+       defer file.Close()
+       boardId := strings.TrimSpace(input.Request.FormValue("boardId"))
+       if boardId == "" {
+               return nil, errors.Default.New("empty boardId")
+       }
+       incremental := false
+       if input.Request.FormValue("incremental") == "true" {
+               incremental = true
+       }
+       return nil, h.svc.ImportSprint(boardId, file, incremental)
+}
+
+// ImportIssueChangelog accepts a CSV file, parses and saves it to the database
+// @Summary      Upload issue_changelogs.csv file
+// @Description  Upload issue_changelogs.csv file
+// @Tags                plugins/customize
+// @Accept       multipart/form-data
+// @Param        boardId formData string true "the ID of the board"
+// @Param        file formData file true "select file to upload"
+// @Param        incremental formData boolean false "Whether to incrementally update changelogs" default(false)
+// @Produce      json
+// @Success      200
+// @Failure 400  {object} shared.ApiBody "Bad Request"
+// @Failure 500  {object} shared.ApiBody "Internal Error"
+// @Router       /plugins/customize/csvfiles/issue_changelogs.csv [post]
+func (h *Handlers) ImportIssueChangelog(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
+       file, err := h.extractFile(input)
+       if err != nil {
+               return nil, err
+       }
+       // nolint
+       defer file.Close()
+       boardId := strings.TrimSpace(input.Request.FormValue("boardId"))
+       if boardId == "" {
+               return nil, errors.Default.New("empty boardId")
+       }
+       incremental := false
+       if input.Request.FormValue("incremental") == "true" {
+               incremental = true
+       }
+       return nil, h.svc.ImportIssueChangelog(boardId, file, incremental)
+}
+
+// ImportIssueWorklog accepts a CSV file, parses and saves it to the database
+// @Summary      Upload issue_worklogs.csv file
+// @Description  Upload issue_worklogs.csv file
+// @Tags                plugins/customize
+// @Accept       multipart/form-data
+// @Param        boardId formData string true "the ID of the board"
+// @Param        file formData file true "select file to upload"
+// @Param        incremental formData boolean false "Whether to do incremental sync (default false)" default(false)
+// @Produce      json
+// @Success      200
+// @Failure 400  {object} shared.ApiBody "Bad Request"
+// @Failure 500  {object} shared.ApiBody "Internal Error"
+// @Router       /plugins/customize/csvfiles/issue_worklogs.csv [post]
+func (h *Handlers) ImportIssueWorklog(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
+       file, err := h.extractFile(input)
+       if err != nil {
+               return nil, err
+       }
+       // nolint
+       defer file.Close()
+       boardId := strings.TrimSpace(input.Request.FormValue("boardId"))
+       if boardId == "" {
+               return nil, errors.Default.New("empty boardId")
+       }
+       incremental := false
+       if input.Request.FormValue("incremental") == "true" {
+               incremental = true
+       }
+       return nil, h.svc.ImportIssueWorklog(boardId, file, incremental)
+}
+
 func (h *Handlers) extractFile(input *plugin.ApiResourceInput) (io.ReadCloser, 
errors.Error) {
        if input.Request == nil {
                return nil, errors.Default.New("request is nil")
diff --git a/backend/plugins/customize/e2e/import_issue_changelogs_test.go 
b/backend/plugins/customize/e2e/import_issue_changelogs_test.go
new file mode 100644
index 000000000..c62bad04e
--- /dev/null
+++ b/backend/plugins/customize/e2e/import_issue_changelogs_test.go
@@ -0,0 +1,101 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package e2e
+
+import (
+       "os"
+       "testing"
+
+       
"github.com/apache/incubator-devlake/core/models/domainlayer/crossdomain"
+       "github.com/apache/incubator-devlake/core/models/domainlayer/ticket"
+       "github.com/apache/incubator-devlake/helpers/e2ehelper"
+       "github.com/apache/incubator-devlake/plugins/customize/impl"
+       "github.com/apache/incubator-devlake/plugins/customize/service"
+)
+
+func TestImportIssueChangelogDataFlow(t *testing.T) {
+       var plugin impl.Customize
+       dataflowTester := e2ehelper.NewDataFlowTester(t, "customize", plugin)
+
+       // flush the target tables
+       dataflowTester.FlushTabler(&ticket.IssueChangelogs{})
+       dataflowTester.FlushTabler(&crossdomain.Account{})
+
+       // initialize the service
+       svc := service.NewService(dataflowTester.Dal)
+
+       // import the full data set
+       changelogFile, err := os.Open("raw_tables/issue_changelogs.csv")
+       if err != nil {
+               t.Fatal(err)
+       }
+       defer changelogFile.Close()
+       err = svc.ImportIssueChangelog("TEST_BOARD", changelogFile, false)
+       if err != nil {
+               t.Fatal(err)
+       }
+
+       // verify the result of the full import
+       dataflowTester.VerifyTableWithRawData(
+               ticket.IssueChangelogs{},
+               "snapshot_tables/issue_changelogs.csv",
+               []string{
+                       "id",
+                       "issue_id",
+                       "author_id",
+                       "field_name",
+                       "original_from_value",
+                       "original_to_value",
+                       "created_date",
+               })
+
+       // import the incremental data
+       incrementalFile, err := 
os.Open("raw_tables/issue_changelogs_incremental.csv")
+       if err != nil {
+               t.Fatal(err)
+       }
+       defer incrementalFile.Close()
+       err = svc.ImportIssueChangelog("TEST_BOARD", incrementalFile, true)
+       if err != nil {
+               t.Fatal(err)
+       }
+
+       // verify the result of the incremental import
+       dataflowTester.VerifyTableWithRawData(
+               ticket.IssueChangelogs{},
+               "snapshot_tables/issue_changelogs_incremental.csv",
+               []string{
+                       "id",
+                       "issue_id",
+                       "author_id",
+                       "field_name",
+                       "original_from_value",
+                       "original_to_value",
+                       "created_date",
+               })
+
+       dataflowTester.VerifyTable(
+               crossdomain.Account{},
+               "snapshot_tables/accounts_from_issue_changelogs.csv",
+               []string{
+                       "id",
+                       "full_name",
+                       "user_name",
+               },
+       )
+}
diff --git a/backend/plugins/customize/e2e/import_issue_worklogs_test.go 
b/backend/plugins/customize/e2e/import_issue_worklogs_test.go
new file mode 100644
index 000000000..71ed67e6b
--- /dev/null
+++ b/backend/plugins/customize/e2e/import_issue_worklogs_test.go
@@ -0,0 +1,101 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package e2e
+
+import (
+       "os"
+       "testing"
+
+       
"github.com/apache/incubator-devlake/core/models/domainlayer/crossdomain"
+       "github.com/apache/incubator-devlake/core/models/domainlayer/ticket"
+       "github.com/apache/incubator-devlake/helpers/e2ehelper"
+       "github.com/apache/incubator-devlake/plugins/customize/impl"
+       "github.com/apache/incubator-devlake/plugins/customize/service"
+)
+
+func TestImportIssueWorklogDataFlow(t *testing.T) {
+       var plugin impl.Customize
+       dataflowTester := e2ehelper.NewDataFlowTester(t, "customize", plugin)
+
+       // flush the target tables
+       dataflowTester.FlushTabler(&ticket.IssueWorklog{})
+       dataflowTester.FlushTabler(&crossdomain.Account{})
+
+       // initialize the service
+       svc := service.NewService(dataflowTester.Dal)
+
+       // import the full data set
+       worklogFile, err := os.Open("raw_tables/issue_worklogs.csv")
+       if err != nil {
+               t.Fatal(err)
+       }
+       defer worklogFile.Close()
+       err = svc.ImportIssueWorklog("TEST_BOARD", worklogFile, false)
+       if err != nil {
+               t.Fatal(err)
+       }
+
+       // verify the result of the full import
+       dataflowTester.VerifyTableWithRawData(
+               ticket.IssueWorklog{},
+               "snapshot_tables/issue_worklogs.csv",
+               []string{
+                       "id",
+                       "issue_id",
+                       "author_id",
+                       "time_spent_minutes",
+                       "started_date",
+                       "logged_date",
+                       "comment",
+               })
+
+       // import the incremental data
+       incrementalFile, err := 
os.Open("raw_tables/issue_worklogs_incremental.csv")
+       if err != nil {
+               t.Fatal(err)
+       }
+       defer incrementalFile.Close()
+       err = svc.ImportIssueWorklog("TEST_BOARD", incrementalFile, true)
+       if err != nil {
+               t.Fatal(err)
+       }
+
+       // verify the result of the incremental import
+       dataflowTester.VerifyTableWithRawData(
+               ticket.IssueWorklog{},
+               "snapshot_tables/issue_worklogs_incremental.csv",
+               []string{
+                       "id",
+                       "issue_id",
+                       "author_id",
+                       "time_spent_minutes",
+                       "started_date",
+                       "logged_date",
+                       "comment",
+               })
+
+       dataflowTester.VerifyTable(
+               crossdomain.Account{},
+               "snapshot_tables/accounts_from_issue_worklogs.csv",
+               []string{
+                       "id",
+                       "full_name",
+                       "user_name",
+               },
+       )
+}
diff --git a/backend/plugins/customize/e2e/import_issues_test.go 
b/backend/plugins/customize/e2e/import_issues_test.go
index 82de28d09..576fdcddb 100644
--- a/backend/plugins/customize/e2e/import_issues_test.go
+++ b/backend/plugins/customize/e2e/import_issues_test.go
@@ -39,6 +39,7 @@ func TestImportIssueDataFlow(t *testing.T) {
        dataflowTester.FlushTabler(&ticket.IssueLabel{})
        dataflowTester.FlushTabler(&ticket.BoardIssue{})
        dataflowTester.FlushTabler(&crossdomain.Account{})
+       dataflowTester.FlushTabler(&ticket.SprintIssue{})
        svc := service.NewService(dataflowTester.Dal)
        err := svc.CreateField(&models.CustomizedField{
                TbName:      "issues",
@@ -183,4 +184,13 @@ func TestImportIssueDataFlow(t *testing.T) {
                        "user_name",
                },
        )
+
+       dataflowTester.VerifyTableWithRawData(
+               &ticket.SprintIssue{},
+               "snapshot_tables/sprint_issues.csv",
+               []string{
+                       "sprint_id",
+                       "issue_id",
+               },
+       )
 }
diff --git a/backend/plugins/customize/e2e/import_sprint_test.go 
b/backend/plugins/customize/e2e/import_sprint_test.go
new file mode 100644
index 000000000..33a2a30f8
--- /dev/null
+++ b/backend/plugins/customize/e2e/import_sprint_test.go
@@ -0,0 +1,85 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package e2e
+
+import (
+       "os"
+       "testing"
+
+       "github.com/apache/incubator-devlake/core/models/domainlayer/ticket"
+       "github.com/apache/incubator-devlake/helpers/e2ehelper"
+       "github.com/apache/incubator-devlake/plugins/customize/impl"
+       "github.com/apache/incubator-devlake/plugins/customize/service"
+)
+
+func TestImportSprintDataFlow(t *testing.T) {
+       var plugin impl.Customize
+       dataflowTester := e2ehelper.NewDataFlowTester(t, "customize", plugin)
+
+       // flush (re-create) the target tables
+       dataflowTester.FlushTabler(&ticket.Sprint{})
+       dataflowTester.FlushTabler(&ticket.BoardSprint{})
+
+       // initialize the service
+       svc := service.NewService(dataflowTester.Dal)
+
+       // import the full data set
+       sprintFile, err := os.Open("raw_tables/sprints.csv")
+       if err != nil {
+               t.Fatal(err)
+       }
+       defer sprintFile.Close()
+       err = svc.ImportSprint("csv-board", sprintFile, false)
+       if err != nil {
+               t.Fatal(err)
+       }
+
+       // import the incremental data
+       sprintIncrementalFile, err := 
os.Open("raw_tables/sprints_incremental.csv")
+       if err != nil {
+               t.Fatal(err)
+       }
+       defer sprintIncrementalFile.Close()
+       err = svc.ImportSprint("csv-board", sprintIncrementalFile, true)
+       if err != nil {
+               t.Fatal(err)
+       }
+
+       // verify the results
+       dataflowTester.VerifyTableWithRawData(
+               ticket.Sprint{},
+               "snapshot_tables/sprints.csv",
+               []string{
+                       "id",
+                       "url",
+                       "status",
+                       "name",
+                       "started_date",
+                       "ended_date",
+                       "completed_date",
+                       "original_board_id",
+               })
+
+       dataflowTester.VerifyTableWithRawData(
+               ticket.BoardSprint{},
+               "snapshot_tables/board_sprints.csv",
+               []string{
+                       "board_id",
+                       "sprint_id",
+               })
+}
diff --git a/backend/plugins/customize/e2e/raw_tables/issue_changelogs.csv 
b/backend/plugins/customize/e2e/raw_tables/issue_changelogs.csv
new file mode 100644
index 000000000..7342bbdca
--- /dev/null
+++ b/backend/plugins/customize/e2e/raw_tables/issue_changelogs.csv
@@ -0,0 +1,7 @@
+id,issue_id,author_id,field_name,original_from_value,original_to_value,created_date
+changelog1,issue1,user1,status,Open,In Progress,2023-01-01 10:00:00+00:00
+changelog2,issue1,user2,assignee,user1,user2,2023-01-02 10:00:00+00:00
+changelog3,issue1,user3,Sprint,Sprint1,"Sprint2,Sprint3",2023-01-03 
10:00:00+00:00
+changelog4,issue2,user1,status,In Progress,Done,2023-01-04 10:00:00+00:00
+changelog5,issue2,user4,assignee,user3,user4,2023-01-05 10:00:00+00:00
+changelog6,issue2,user2,Sprint,"Sprint2,Sprint3",Sprint4,2023-01-06 
10:00:00+00:00
\ No newline at end of file
diff --git 
a/backend/plugins/customize/e2e/raw_tables/issue_changelogs_incremental.csv 
b/backend/plugins/customize/e2e/raw_tables/issue_changelogs_incremental.csv
new file mode 100644
index 000000000..95391c5b0
--- /dev/null
+++ b/backend/plugins/customize/e2e/raw_tables/issue_changelogs_incremental.csv
@@ -0,0 +1,5 @@
+id,issue_id,author_id,field_name,original_from_value,original_to_value,created_date
+changelog7,issue1,user3,status,Done,Reopened,2023-01-07 10:00:00+00:00
+changelog8,issue1,user5,assignee,user2,user5,2023-01-08 10:00:00+00:00
+changelog9,issue1,user4,Sprint,Sprint3,"Sprint4,Sprint5",2023-01-09 
10:00:00+00:00
+changelog10,issue3,user1,status,Open,In Progress,2023-01-10 10:00:00+00:00
\ No newline at end of file
diff --git a/backend/plugins/customize/e2e/raw_tables/issue_worklogs.csv 
b/backend/plugins/customize/e2e/raw_tables/issue_worklogs.csv
new file mode 100644
index 000000000..cdc5f3daa
--- /dev/null
+++ b/backend/plugins/customize/e2e/raw_tables/issue_worklogs.csv
@@ -0,0 +1,4 @@
+id,issue_id,author_name,time_spent_minutes,started_date,logged_date,comment
+worklog1,ISSUE-1,Alice,30,2023-01-01 10:00:00+00:00,2022-07-17 
07:15:55.959+00:00,"Initial work"
+worklog2,ISSUE-1,Bob,45,2023-01-02 11:00:00+00:00,2022-07-18 
08:20:30.123+00:00,"Follow up"
+worklog3,ISSUE-2,Alice,60,2023-01-03 09:00:00+00:00,2022-07-19 
09:45:15.456+00:00,"Task completion"
\ No newline at end of file
diff --git 
a/backend/plugins/customize/e2e/raw_tables/issue_worklogs_incremental.csv 
b/backend/plugins/customize/e2e/raw_tables/issue_worklogs_incremental.csv
new file mode 100644
index 000000000..1f1c56478
--- /dev/null
+++ b/backend/plugins/customize/e2e/raw_tables/issue_worklogs_incremental.csv
@@ -0,0 +1,3 @@
+id,issue_id,author_name,time_spent_minutes,started_date,logged_date,comment
+worklog4,ISSUE-2,Charlie,15,2023-01-04 14:00:00+00:00,2022-07-20 
10:30:45.789+00:00,"Quick fix"
+worklog2,ISSUE-1,Bob,20,2023-01-02 11:00:00+00:00,2022-07-21 
11:15:30.000+00:00,"Updated time"
\ No newline at end of file
diff --git a/backend/plugins/customize/e2e/raw_tables/issues_input.csv 
b/backend/plugins/customize/e2e/raw_tables/issues_input.csv
index 1eacd5fcd..8beecf831 100644
--- a/backend/plugins/customize/e2e/raw_tables/issues_input.csv
+++ b/backend/plugins/customize/e2e/raw_tables/issues_input.csv
@@ -1,4 +1,4 @@
-id,url,issue_key,title,original_type,original_status,created_date,resolution_date,story_point,priority,severity,original_estimate_minutes,time_spent_minutes,component,epic_key,creator_name,assignee_name,x_int,x_time,x_varchar,x_float,labels
-csv:1,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/1,1,issue
 test,BUG,new,2022-07-17 
07:15:55.959+00:00,NULL,0,major,,0,0,,,tgp,klesh,10,2022-09-15 
15:27:56,world,8,NULL
-csv:10,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/10,10,issue
 test007,BUG,new,2022-08-12 
13:43:00.783+00:00,NULL,0,trivial,,0,0,,,tgp,warren,30,2022-09-15 
15:27:56,abc,24590,hello worlds
-csv:11,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/11,11,issue
 test011,REQUIREMENT,new,2022-08-10 
13:44:46.508+00:00,NULL,0,major,,0,0,,,tgp,abeizn,1,2022-09-15 
15:27:56,NULL,0.00014,NULL
+id,url,issue_key,title,original_type,original_status,created_date,resolution_date,story_point,priority,severity,original_estimate_minutes,time_spent_minutes,component,epic_key,creator_name,assignee_name,x_int,x_time,x_varchar,x_float,labels,sprint_ids
+csv:1,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/1,1,issue
 test,BUG,new,2022-07-17 
07:15:55.959+00:00,NULL,0,major,,0,0,,,tgp,klesh,10,2022-09-15 
15:27:56,world,8,NULL,"101,102"
+csv:10,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/10,10,issue
 test007,BUG,new,2022-08-12 
13:43:00.783+00:00,NULL,0,trivial,,0,0,,,tgp,warren,30,2022-09-15 
15:27:56,abc,24590,hello worlds,101
+csv:11,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/11,11,issue
 test011,REQUIREMENT,new,2022-08-10 
13:44:46.508+00:00,NULL,0,major,,0,0,,,tgp,abeizn,1,2022-09-15 
15:27:56,NULL,0.00014,NULL,102
diff --git 
a/backend/plugins/customize/e2e/raw_tables/issues_input_incremental.csv 
b/backend/plugins/customize/e2e/raw_tables/issues_input_incremental.csv
index 71ca994db..90dcc4591 100644
--- a/backend/plugins/customize/e2e/raw_tables/issues_input_incremental.csv
+++ b/backend/plugins/customize/e2e/raw_tables/issues_input_incremental.csv
@@ -1,4 +1,4 @@
-id,url,issue_key,title,original_type,original_status,created_date,resolution_date,story_point,priority,severity,original_estimate_minutes,time_spent_minutes,component,epic_key,creator_name,assignee_name,x_int,x_time,x_varchar,x_float,labels
-csv:12,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/12,12,issue
 test012,REQUIREMENT,new,2022-08-11 
13:44:46.508+00:00,NULL,0,major,,0,0,,,tgp,,1,2022-09-15 
15:27:56,NULL,0.00014,NULL
-csv:13,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/13,13,issue
 test013,REQUIREMENT,new,2022-08-12 
13:44:46.508+00:00,NULL,0,critical,,0,0,,,tgp,,1,2022-09-15 
15:27:56,NULL,0.00014,NULL
-csv:14,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/14,14,issue
 test014,INCIDENT,new,2022-08-12 
13:45:12.810+00:00,NULL,0,blocker,,0,0,,,tgp,tgp,41534568464351,2022-09-15 
15:27:56,NULL,NULL,"label1,label2,label3"
+id,url,issue_key,title,original_type,original_status,created_date,resolution_date,story_point,priority,severity,original_estimate_minutes,time_spent_minutes,component,epic_key,creator_name,assignee_name,x_int,x_time,x_varchar,x_float,labels,sprint_ids
+csv:12,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/12,12,issue
 test012,REQUIREMENT,new,2022-08-11 
13:44:46.508+00:00,NULL,0,major,,0,0,,,tgp,,1,2022-09-15 
15:27:56,NULL,0.00014,NULL,103
+csv:13,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/13,13,issue
 test013,REQUIREMENT,new,2022-08-12 
13:44:46.508+00:00,NULL,0,critical,,0,0,,,tgp,,1,2022-09-15 
15:27:56,NULL,0.00014,NULL,101
+csv:14,https://api.bitbucket.org/2.0/repositories/thenicetgp/lake/issues/14,14,issue
 test014,INCIDENT,new,2022-08-12 
13:45:12.810+00:00,NULL,0,blocker,,0,0,,,tgp,tgp,41534568464351,2022-09-15 
15:27:56,NULL,NULL,"label1,label2,label3",102
diff --git a/backend/plugins/customize/e2e/raw_tables/sprints.csv 
b/backend/plugins/customize/e2e/raw_tables/sprints.csv
new file mode 100644
index 000000000..d9c4d98ea
--- /dev/null
+++ b/backend/plugins/customize/e2e/raw_tables/sprints.csv
@@ -0,0 +1,3 @@
+id,url,status,name,started_date,ended_date,completed_date
+SPRINT-1,http://example.com/sprint1,active,Sprint 1,2023-01-01 
00:00:00+00:00,2023-01-14 00:00:00+00:00,2023-01-15 00:00:00+00:00
+SPRINT-2,http://example.com/sprint2,active,Sprint 2,2023-02-01 
00:00:00+00:00,2023-02-14 00:00:00+00:00,2023-02-15 00:00:00+00:00
diff --git a/backend/plugins/customize/e2e/raw_tables/sprints_incremental.csv 
b/backend/plugins/customize/e2e/raw_tables/sprints_incremental.csv
new file mode 100644
index 000000000..f3d956c00
--- /dev/null
+++ b/backend/plugins/customize/e2e/raw_tables/sprints_incremental.csv
@@ -0,0 +1,2 @@
+id,url,status,name,started_date,ended_date,completed_date
+SPRINT-3,http://example.com/sprint3,active,Sprint 3,2023-03-01 
00:00:00+00:00,2023-03-14 00:00:00+00:00,2023-03-15 00:00:00+00:00
diff --git 
a/backend/plugins/customize/e2e/snapshot_tables/accounts_from_issue_changelogs.csv
 
b/backend/plugins/customize/e2e/snapshot_tables/accounts_from_issue_changelogs.csv
new file mode 100644
index 000000000..bd9ad5701
--- /dev/null
+++ 
b/backend/plugins/customize/e2e/snapshot_tables/accounts_from_issue_changelogs.csv
@@ -0,0 +1,6 @@
+id,full_name,user_name
+csv:CsvAccount:0:user1,user1,user1
+csv:CsvAccount:0:user2,user2,user2
+csv:CsvAccount:0:user3,user3,user3
+csv:CsvAccount:0:user4,user4,user4
+csv:CsvAccount:0:user5,user5,user5
diff --git 
a/backend/plugins/customize/e2e/snapshot_tables/accounts_from_issue_worklogs.csv
 
b/backend/plugins/customize/e2e/snapshot_tables/accounts_from_issue_worklogs.csv
new file mode 100644
index 000000000..4e5080935
--- /dev/null
+++ 
b/backend/plugins/customize/e2e/snapshot_tables/accounts_from_issue_worklogs.csv
@@ -0,0 +1,4 @@
+id,full_name,user_name
+csv:CsvAccount:0:Alice,Alice,Alice
+csv:CsvAccount:0:Bob,Bob,Bob
+csv:CsvAccount:0:Charlie,Charlie,Charlie
diff --git a/backend/plugins/customize/e2e/snapshot_tables/board_sprints.csv 
b/backend/plugins/customize/e2e/snapshot_tables/board_sprints.csv
new file mode 100644
index 000000000..fd7887245
--- /dev/null
+++ b/backend/plugins/customize/e2e/snapshot_tables/board_sprints.csv
@@ -0,0 +1,4 @@
+board_id,sprint_id,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+csv-board,SPRINT-1,,,0,
+csv-board,SPRINT-2,,,0,
+csv-board,SPRINT-3,,,0,
diff --git a/backend/plugins/customize/e2e/snapshot_tables/issue_changelogs.csv 
b/backend/plugins/customize/e2e/snapshot_tables/issue_changelogs.csv
new file mode 100644
index 000000000..38216871c
--- /dev/null
+++ b/backend/plugins/customize/e2e/snapshot_tables/issue_changelogs.csv
@@ -0,0 +1,7 @@
+id,issue_id,author_id,field_name,original_from_value,original_to_value,created_date,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+changelog1,issue1,user1,status,Open,In 
Progress,2023-01-01T10:00:00.000+00:00,TEST_BOARD,,,
+changelog2,issue1,user2,assignee,csv:CsvAccount:0:user1,csv:CsvAccount:0:user2,2023-01-02T10:00:00.000+00:00,TEST_BOARD,,,
+changelog3,issue1,user3,Sprint,Sprint1,"Sprint2,Sprint3",2023-01-03T10:00:00.000+00:00,TEST_BOARD,,,
+changelog4,issue2,user1,status,In 
Progress,Done,2023-01-04T10:00:00.000+00:00,TEST_BOARD,,,
+changelog5,issue2,user4,assignee,csv:CsvAccount:0:user3,csv:CsvAccount:0:user4,2023-01-05T10:00:00.000+00:00,TEST_BOARD,,,
+changelog6,issue2,user2,Sprint,"Sprint2,Sprint3",Sprint4,2023-01-06T10:00:00.000+00:00,TEST_BOARD,,,
diff --git 
a/backend/plugins/customize/e2e/snapshot_tables/issue_changelogs_incremental.csv
 
b/backend/plugins/customize/e2e/snapshot_tables/issue_changelogs_incremental.csv
new file mode 100644
index 000000000..e13481a0b
--- /dev/null
+++ 
b/backend/plugins/customize/e2e/snapshot_tables/issue_changelogs_incremental.csv
@@ -0,0 +1,11 @@
+id,issue_id,author_id,field_name,original_from_value,original_to_value,created_date,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+changelog1,issue1,user1,status,Open,In 
Progress,2023-01-01T10:00:00.000+00:00,TEST_BOARD,,,
+changelog10,issue3,user1,status,Open,In 
Progress,2023-01-10T10:00:00.000+00:00,TEST_BOARD,,,
+changelog2,issue1,user2,assignee,csv:CsvAccount:0:user1,csv:CsvAccount:0:user2,2023-01-02T10:00:00.000+00:00,TEST_BOARD,,,
+changelog3,issue1,user3,Sprint,Sprint1,"Sprint2,Sprint3",2023-01-03T10:00:00.000+00:00,TEST_BOARD,,,
+changelog4,issue2,user1,status,In 
Progress,Done,2023-01-04T10:00:00.000+00:00,TEST_BOARD,,,
+changelog5,issue2,user4,assignee,csv:CsvAccount:0:user3,csv:CsvAccount:0:user4,2023-01-05T10:00:00.000+00:00,TEST_BOARD,,,
+changelog6,issue2,user2,Sprint,"Sprint2,Sprint3",Sprint4,2023-01-06T10:00:00.000+00:00,TEST_BOARD,,,
+changelog7,issue1,user3,status,Done,Reopened,2023-01-07T10:00:00.000+00:00,TEST_BOARD,,,
+changelog8,issue1,user5,assignee,csv:CsvAccount:0:user2,csv:CsvAccount:0:user5,2023-01-08T10:00:00.000+00:00,TEST_BOARD,,,
+changelog9,issue1,user4,Sprint,Sprint3,"Sprint4,Sprint5",2023-01-09T10:00:00.000+00:00,TEST_BOARD,,,
diff --git a/backend/plugins/customize/e2e/snapshot_tables/issue_worklogs.csv 
b/backend/plugins/customize/e2e/snapshot_tables/issue_worklogs.csv
new file mode 100644
index 000000000..3bacb2d21
--- /dev/null
+++ b/backend/plugins/customize/e2e/snapshot_tables/issue_worklogs.csv
@@ -0,0 +1,4 @@
+id,issue_id,author_id,time_spent_minutes,started_date,logged_date,comment,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+worklog1,ISSUE-1,csv:CsvAccount:0:Alice,30,2023-01-01T10:00:00.000+00:00,2022-07-17T07:15:55.959+00:00,Initial
 work,TEST_BOARD,,,
+worklog2,ISSUE-1,csv:CsvAccount:0:Bob,45,2023-01-02T11:00:00.000+00:00,2022-07-18T08:20:30.123+00:00,Follow
 up,TEST_BOARD,,,
+worklog3,ISSUE-2,csv:CsvAccount:0:Alice,60,2023-01-03T09:00:00.000+00:00,2022-07-19T09:45:15.456+00:00,Task
 completion,TEST_BOARD,,,
diff --git 
a/backend/plugins/customize/e2e/snapshot_tables/issue_worklogs_incremental.csv 
b/backend/plugins/customize/e2e/snapshot_tables/issue_worklogs_incremental.csv
new file mode 100644
index 000000000..15b98726b
--- /dev/null
+++ 
b/backend/plugins/customize/e2e/snapshot_tables/issue_worklogs_incremental.csv
@@ -0,0 +1,5 @@
+id,issue_id,author_id,time_spent_minutes,started_date,logged_date,comment,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+worklog1,ISSUE-1,csv:CsvAccount:0:Alice,30,2023-01-01T10:00:00.000+00:00,2022-07-17T07:15:55.959+00:00,Initial
 work,TEST_BOARD,,,
+worklog2,ISSUE-1,csv:CsvAccount:0:Bob,20,2023-01-02T11:00:00.000+00:00,2022-07-21T11:15:30.000+00:00,Updated
 time,TEST_BOARD,,,
+worklog3,ISSUE-2,csv:CsvAccount:0:Alice,60,2023-01-03T09:00:00.000+00:00,2022-07-19T09:45:15.456+00:00,Task
 completion,TEST_BOARD,,,
+worklog4,ISSUE-2,csv:CsvAccount:0:Charlie,15,2023-01-04T14:00:00.000+00:00,2022-07-20T10:30:45.789+00:00,Quick
 fix,TEST_BOARD,,,
diff --git a/backend/plugins/customize/e2e/snapshot_tables/sprint_issues.csv 
b/backend/plugins/customize/e2e/snapshot_tables/sprint_issues.csv
new file mode 100644
index 000000000..6d87a3881
--- /dev/null
+++ b/backend/plugins/customize/e2e/snapshot_tables/sprint_issues.csv
@@ -0,0 +1,8 @@
+sprint_id,issue_id,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+101,csv:1,,,0,
+101,csv:10,,,0,
+101,csv:13,,,0,
+102,csv:1,,,0,
+102,csv:11,,,0,
+102,csv:14,,,0,
+103,csv:12,,,0,
diff --git a/backend/plugins/customize/e2e/snapshot_tables/sprints.csv 
b/backend/plugins/customize/e2e/snapshot_tables/sprints.csv
new file mode 100644
index 000000000..3019c260e
--- /dev/null
+++ b/backend/plugins/customize/e2e/snapshot_tables/sprints.csv
@@ -0,0 +1,4 @@
+id,url,status,name,started_date,ended_date,completed_date,original_board_id,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+SPRINT-1,http://example.com/sprint1,active,Sprint 
1,2023-01-01T00:00:00.000+00:00,2023-01-14T00:00:00.000+00:00,2023-01-15T00:00:00.000+00:00,csv-board,csv-board,,,
+SPRINT-2,http://example.com/sprint2,active,Sprint 
2,2023-02-01T00:00:00.000+00:00,2023-02-14T00:00:00.000+00:00,2023-02-15T00:00:00.000+00:00,csv-board,csv-board,,,
+SPRINT-3,http://example.com/sprint3,active,Sprint 
3,2023-03-01T00:00:00.000+00:00,2023-03-14T00:00:00.000+00:00,2023-03-15T00:00:00.000+00:00,csv-board,csv-board,,,
diff --git a/backend/plugins/customize/impl/impl.go 
b/backend/plugins/customize/impl/impl.go
index 599b02c60..dd689b794 100644
--- a/backend/plugins/customize/impl/impl.go
+++ b/backend/plugins/customize/impl/impl.go
@@ -108,6 +108,15 @@ func (p Customize) ApiResources() 
map[string]map[string]plugin.ApiResourceHandle
                "csvfiles/issue_repo_commits.csv": {
                        "POST": handlers.ImportIssueRepoCommit,
                },
+               "csvfiles/issue_changelogs.csv": {
+                       "POST": handlers.ImportIssueChangelog,
+               },
+               "csvfiles/issue_worklogs.csv": {
+                       "POST": handlers.ImportIssueWorklog,
+               },
+               "csvfiles/sprints.csv": {
+                       "POST": handlers.ImportSprint,
+               },
                "csvfiles/qa_apis.csv": {
                        "POST": handlers.ImportQaApis,
                },
diff --git a/backend/plugins/customize/service/service.go 
b/backend/plugins/customize/service/service.go
index 22267453f..89e8833cf 100644
--- a/backend/plugins/customize/service/service.go
+++ b/backend/plugins/customize/service/service.go
@@ -283,7 +283,7 @@ func (s *Service) createOrUpdateAccount(accountName string, 
rawDataParams string
                },
                FullName:    accountName,
                UserName:    accountName,
-               CreatedDate: &now,
+               CreatedDate: &now, // FIXME: this overwrites created_date when the account already exists; to debug, use created_at instead
        }
        err := s.dal.CreateOrUpdate(account)
        if err != nil {
@@ -395,6 +395,26 @@ func (s *Service) issueHandlerFactory(boardId string, 
incremental bool) func(rec
                        record["assignee_id"] = assigneeId
                }
 
+               // Handle sprint_ids
+               sprintIds, err := getStringField(record, "sprint_ids", false)
+               if err != nil {
+                       return err
+               }
+               sprints := strings.Split(strings.TrimSpace(sprintIds), ",")
+               for _, sprintId := range sprints {
+                       sprintId = strings.TrimSpace(sprintId)
+                       if sprintId != "" {
+                               err = s.dal.CreateOrUpdate(&ticket.SprintIssue{
+                                       SprintId: sprintId,
+                                       IssueId:  id,
+                               })
+                               if err != nil {
+                                       return err
+                               }
+                       }
+               }
+               delete(record, "sprint_ids")
+
                // Handle issues
                err = s.dal.CreateWithMap(&ticket.Issue{}, record)
                if err != nil {
@@ -539,3 +559,143 @@ func (s *Service) issueRepoCommitHandler(record 
map[string]interface{}) errors.E
        delete(record, "repo_url")
        return s.dal.CreateWithMap(&crossdomain.IssueCommit{}, record)
 }
+
+// ImportSprint imports csv file into the table `sprints`
+func (s *Service) ImportSprint(boardId string, file io.ReadCloser, incremental 
bool) errors.Error {
+       if !incremental {
+               err := s.dal.Delete(
+                       &ticket.Sprint{},
+                       dal.Where("id IN (SELECT sprint_id FROM board_sprints 
WHERE board_id=? AND sprint_id NOT IN (SELECT sprint_id FROM board_sprints 
WHERE board_id!=?))", boardId, boardId),
+               )
+               if err != nil {
+                       return err
+               }
+               err = s.dal.Delete(
+                       &ticket.BoardSprint{},
+                       dal.Where("board_id = ?", boardId),
+               )
+               if err != nil {
+                       return err
+               }
+       }
+       return s.importCSV(file, boardId, s.sprintHandler(boardId))
+}
+
+// sprintHandler saves a record into the `sprints` table
+func (s *Service) sprintHandler(boardId string) func(record 
map[string]interface{}) errors.Error {
+       return func(record map[string]interface{}) errors.Error {
+               id, err := getStringField(record, "id", true)
+               if err != nil {
+                       return err
+               }
+               record["original_board_id"] = boardId
+               err = s.dal.CreateWithMap(&ticket.Sprint{}, record)
+               if err != nil {
+                       return err
+               }
+
+               // Create board_sprint relation
+               return s.dal.CreateOrUpdate(&ticket.BoardSprint{
+                       BoardId:  boardId,
+                       SprintId: id,
+               })
+       }
+}
+
+// ImportIssueChangelog imports csv file into the table `issue_changelogs`
+func (s *Service) ImportIssueChangelog(boardId string, file io.ReadCloser, 
incremental bool) errors.Error {
+       if !incremental {
+               err := s.dal.Delete(
+                       &ticket.IssueChangelogs{},
+                       dal.Where("issue_id IN (SELECT issue_id FROM 
board_issues WHERE board_id=? AND issue_id NOT IN (SELECT issue_id FROM 
board_issues WHERE board_id!=?))", boardId, boardId),
+               )
+               if err != nil {
+                       return err
+               }
+       }
+       return s.importCSV(file, boardId, s.issueChangelogHandler)
+}
+
+// issueChangelogHandler saves a record into the `issue_changelogs` table
+func (s *Service) issueChangelogHandler(record map[string]interface{}) 
errors.Error {
+       // create account
+       authorName, err := getStringField(record, "author_name", false)
+       if err != nil {
+               return err
+       }
+       rawDataParams, err := getStringField(record, "_raw_data_params", true)
+       if err != nil {
+               return err
+       }
+       if authorName != "" {
+               authorId, err := s.createOrUpdateAccount(authorName, 
rawDataParams)
+               if err != nil {
+                       return err
+               }
+               record["author_id"] = authorId
+       }
+       // set field_id = field_name
+       fieldName, err := getStringField(record, "field_name", true)
+       if err != nil {
+               return err
+       }
+       record["field_id"] = fieldName
+       // handle assignee
+       if fieldName == "assignee" {
+               originalFromValue, err := getStringField(record, 
"original_from_value", false)
+               if err != nil {
+                       return err
+               }
+               originalToValue, err := getStringField(record, 
"original_to_value", false)
+               if err != nil {
+                       return err
+               }
+               fromId, err := s.createOrUpdateAccount(originalFromValue, 
rawDataParams)
+               if err != nil {
+                       return err
+               }
+               record["original_from_value"] = fromId
+               toId, err := s.createOrUpdateAccount(originalToValue, 
rawDataParams)
+               if err != nil {
+                       return err
+               }
+               record["original_to_value"] = toId
+       }
+       return s.dal.CreateWithMap(&ticket.IssueChangelogs{}, record)
+}
+
+// ImportIssueWorklog imports csv file into the table `issue_worklogs`
+func (s *Service) ImportIssueWorklog(boardId string, file io.ReadCloser, 
incremental bool) errors.Error {
+       if !incremental {
+               err := s.dal.Delete(
+                       &ticket.IssueWorklog{},
+                       dal.Where("issue_id IN (SELECT issue_id FROM 
board_issues WHERE board_id=? AND issue_id NOT IN (SELECT issue_id FROM 
board_issues WHERE board_id!=?))", boardId, boardId),
+               )
+               if err != nil {
+                       return err
+               }
+       }
+       return s.importCSV(file, boardId, s.issueWorklogHandler)
+}
+
+// issueWorklogHandler saves a record into the `issue_worklogs` table
+func (s *Service) issueWorklogHandler(record map[string]interface{}) 
errors.Error {
+       // create account
+       authorName, err := getStringField(record, "author_name", false)
+       if err != nil {
+               return err
+       }
+       if authorName != "" {
+               rawDataParams, err := getStringField(record, 
"_raw_data_params", true)
+               if err != nil {
+                       return err
+               }
+               authorId, err := s.createOrUpdateAccount(authorName, 
rawDataParams)
+               if err != nil {
+                       return err
+               }
+               record["author_id"] = authorId
+       }
+       delete(record, "author_name")
+       return s.dal.CreateWithMap(&ticket.IssueWorklog{}, record)
+}


Reply via email to