This is an automated email from the ASF dual-hosted git repository.
klesh pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git
The following commit(s) were added to refs/heads/main by this push:
new b1cf56014 feat(q_dev): add Account ID to scope config and
auto-construct both report paths (#8725)
b1cf56014 is described below
commit b1cf560142ade6b07853598fc701346abb7ccf3a
Author: Warren Chen <[email protected]>
AuthorDate: Fri Feb 20 21:04:07 2026 +0800
feat(q_dev): add Account ID to scope config and auto-construct both report
paths (#8725)
Users now provide basePath and accountId instead of manually typing the full
S3 prefix. The collector automatically constructs and scans both
by_user_analytic and user_report paths under
{basePath}/AWSLogs/{accountId}/KiroLogs/…/{region}/{year}[/{month}].
Includes migration, updated blueprint, multi-prefix collector iteration,
user_report model/extractor/dashboard, and frontend Account ID input field.
Old scopes without accountId continue to work unchanged.
---
backend/plugins/q_dev/api/blueprint_v200.go | 4 +
backend/plugins/q_dev/impl/impl.go | 21 +
backend/plugins/q_dev/impl/impl_test.go | 39 +-
...gister.go => 20260219_add_user_report_table.go} | 31 +-
...r.go => 20260220_add_account_id_to_s3_slice.go} | 36 +-
.../migrationscripts/archived/user_report.go | 46 ++
.../q_dev/models/migrationscripts/register.go | 2 +
backend/plugins/q_dev/models/s3_slice.go | 58 ++-
backend/plugins/q_dev/models/user_report.go | 46 ++
backend/plugins/q_dev/tasks/s3_data_extractor.go | 139 +++++-
.../plugins/q_dev/tasks/s3_data_extractor_test.go | 167 ++++++++
backend/plugins/q_dev/tasks/s3_file_collector.go | 108 +++--
backend/plugins/q_dev/tasks/task_data.go | 7 +-
backend/plugins/q_dev/tasks/task_data_test.go | 16 +
.../src/plugins/register/q-dev/data-scope.tsx | 87 +++-
grafana/dashboards/qdev_user_data.json | 2 +-
grafana/dashboards/qdev_user_report.json | 464 +++++++++++++++++++++
17 files changed, 1155 insertions(+), 118 deletions(-)
diff --git a/backend/plugins/q_dev/api/blueprint_v200.go
b/backend/plugins/q_dev/api/blueprint_v200.go
index e3b845cb8..d7606a275 100644
--- a/backend/plugins/q_dev/api/blueprint_v200.go
+++ b/backend/plugins/q_dev/api/blueprint_v200.go
@@ -72,6 +72,10 @@ func makeDataSourcePipelinePlanV200(
ConnectionId: s3Slice.ConnectionId,
S3Prefix: s3Slice.Prefix,
ScopeId: s3Slice.Id,
+ AccountId: s3Slice.AccountId,
+ BasePath: s3Slice.BasePath,
+ Year: s3Slice.Year,
+ Month: s3Slice.Month,
}
// Pass empty entities array to enable all subtasks
diff --git a/backend/plugins/q_dev/impl/impl.go
b/backend/plugins/q_dev/impl/impl.go
index 80118212e..e38fe7ad7 100644
--- a/backend/plugins/q_dev/impl/impl.go
+++ b/backend/plugins/q_dev/impl/impl.go
@@ -57,6 +57,7 @@ func (p QDev) GetTablesInfo() []dal.Tabler {
&models.QDevUserData{},
&models.QDevS3FileMeta{},
&models.QDevS3Slice{},
+ &models.QDevUserReport{},
}
}
@@ -117,10 +118,30 @@ func (p QDev) PrepareTaskData(taskCtx plugin.TaskContext,
options map[string]int
identityClient = nil
}
+ // Resolve S3 prefixes to scan
+ var s3Prefixes []string
+ if op.AccountId != "" {
+ // New-style scope: construct both report paths using region
from connection
+ region := connection.Region
+ timePart := fmt.Sprintf("%04d", op.Year)
+ if op.Month != nil {
+ timePart = fmt.Sprintf("%04d/%02d", op.Year, *op.Month)
+ }
+ base := fmt.Sprintf("%s/AWSLogs/%s/KiroLogs", op.BasePath,
op.AccountId)
+ s3Prefixes = []string{
+ fmt.Sprintf("%s/by_user_analytic/%s/%s", base, region,
timePart),
+ fmt.Sprintf("%s/user_report/%s/%s", base, region,
timePart),
+ }
+ } else {
+ // Legacy scope: use S3Prefix directly
+ s3Prefixes = []string{op.S3Prefix}
+ }
+
return &tasks.QDevTaskData{
Options: &op,
S3Client: s3Client,
IdentityClient: identityClient,
+ S3Prefixes: s3Prefixes,
}, nil
}
diff --git a/backend/plugins/q_dev/impl/impl_test.go
b/backend/plugins/q_dev/impl/impl_test.go
index 97dea86e6..e61b53251 100644
--- a/backend/plugins/q_dev/impl/impl_test.go
+++ b/backend/plugins/q_dev/impl/impl_test.go
@@ -34,7 +34,7 @@ func TestQDev_BasicPluginMethods(t *testing.T) {
// Test table info
tables := plugin.GetTablesInfo()
- assert.Len(t, tables, 4)
+ assert.Len(t, tables, 5)
// Test subtask metas
subtasks := plugin.SubTaskMetas()
@@ -48,7 +48,7 @@ func TestQDev_BasicPluginMethods(t *testing.T) {
}
func TestQDev_TaskDataStructure(t *testing.T) {
- // Test that QDevTaskData has the expected structure
+ // Test that QDevTaskData has the expected structure (legacy mode)
taskData := &tasks.QDevTaskData{
Options: &tasks.QDevOptions{
ConnectionId: 1,
@@ -61,6 +61,7 @@ func TestQDev_TaskDataStructure(t *testing.T) {
StoreId: "d-1234567890",
Region: "us-west-2",
},
+ S3Prefixes: []string{"test/"},
}
assert.NotNil(t, taskData.Options)
@@ -72,6 +73,36 @@ func TestQDev_TaskDataStructure(t *testing.T) {
assert.Equal(t, "test-bucket", taskData.S3Client.Bucket)
assert.Equal(t, "d-1234567890", taskData.IdentityClient.StoreId)
assert.Equal(t, "us-west-2", taskData.IdentityClient.Region)
+ assert.Equal(t, []string{"test/"}, taskData.S3Prefixes)
+}
+
+func TestQDev_TaskDataWithAccountId(t *testing.T) {
+ // Test new-style scope with AccountId and multiple S3Prefixes
+ month := 1
+ taskData := &tasks.QDevTaskData{
+ Options: &tasks.QDevOptions{
+ ConnectionId: 1,
+ AccountId: "034362076319",
+ BasePath: "user-report",
+ Year: 2026,
+ Month: &month,
+ },
+ S3Client: &tasks.QDevS3Client{
+ Bucket: "test-bucket",
+ },
+ S3Prefixes: []string{
+
"user-report/AWSLogs/034362076319/KiroLogs/by_user_analytic/us-east-1/2026/01",
+
"user-report/AWSLogs/034362076319/KiroLogs/user_report/us-east-1/2026/01",
+ },
+ }
+
+ assert.Equal(t, "034362076319", taskData.Options.AccountId)
+ assert.Equal(t, "user-report", taskData.Options.BasePath)
+ assert.Equal(t, 2026, taskData.Options.Year)
+ assert.Equal(t, &month, taskData.Options.Month)
+ assert.Len(t, taskData.S3Prefixes, 2)
+ assert.Contains(t, taskData.S3Prefixes[0], "by_user_analytic")
+ assert.Contains(t, taskData.S3Prefixes[1], "user_report")
}
func TestQDev_TaskDataWithoutIdentityClient(t *testing.T) {
@@ -83,10 +114,12 @@ func TestQDev_TaskDataWithoutIdentityClient(t *testing.T) {
S3Client: &tasks.QDevS3Client{
Bucket: "test-bucket",
},
- IdentityClient: nil, // No identity client
+ IdentityClient: nil,
+ S3Prefixes: []string{"some-prefix/"},
}
assert.NotNil(t, taskData.Options)
assert.NotNil(t, taskData.S3Client)
assert.Nil(t, taskData.IdentityClient)
+ assert.Len(t, taskData.S3Prefixes, 1)
}
diff --git a/backend/plugins/q_dev/models/migrationscripts/register.go
b/backend/plugins/q_dev/models/migrationscripts/20260219_add_user_report_table.go
similarity index 55%
copy from backend/plugins/q_dev/models/migrationscripts/register.go
copy to
backend/plugins/q_dev/models/migrationscripts/20260219_add_user_report_table.go
index 86971e539..5f38c7407 100644
--- a/backend/plugins/q_dev/models/migrationscripts/register.go
+++
b/backend/plugins/q_dev/models/migrationscripts/20260219_add_user_report_table.go
@@ -18,18 +18,25 @@ limitations under the License.
package migrationscripts
import (
- "github.com/apache/incubator-devlake/core/plugin"
+ "github.com/apache/incubator-devlake/core/context"
+ "github.com/apache/incubator-devlake/core/errors"
+ "github.com/apache/incubator-devlake/helpers/migrationhelper"
+
"github.com/apache/incubator-devlake/plugins/q_dev/models/migrationscripts/archived"
)
-// All return all migration scripts
-func All() []plugin.MigrationScript {
- return []plugin.MigrationScript{
- new(initTables),
- new(modifyFileMetaTable),
- new(addDisplayNameFields),
- new(addMissingMetrics),
- new(addS3SliceTable),
- new(addScopeConfigIdToS3Slice),
- new(addScopeIdFields),
- }
+type addUserReportTable struct{}
+
+func (*addUserReportTable) Up(basicRes context.BasicRes) errors.Error {
+ return migrationhelper.AutoMigrateTables(
+ basicRes,
+ &archived.QDevUserReport{},
+ )
+}
+
+func (*addUserReportTable) Version() uint64 {
+ return 20260219000001
+}
+
+func (*addUserReportTable) Name() string {
+ return "Add user_report table for Kiro credits/subscription metrics"
}
diff --git a/backend/plugins/q_dev/models/migrationscripts/register.go
b/backend/plugins/q_dev/models/migrationscripts/20260220_add_account_id_to_s3_slice.go
similarity index 50%
copy from backend/plugins/q_dev/models/migrationscripts/register.go
copy to
backend/plugins/q_dev/models/migrationscripts/20260220_add_account_id_to_s3_slice.go
index 86971e539..71a13c7b2 100644
--- a/backend/plugins/q_dev/models/migrationscripts/register.go
+++
b/backend/plugins/q_dev/models/migrationscripts/20260220_add_account_id_to_s3_slice.go
@@ -18,18 +18,34 @@ limitations under the License.
package migrationscripts
import (
+ "github.com/apache/incubator-devlake/core/context"
+ "github.com/apache/incubator-devlake/core/errors"
"github.com/apache/incubator-devlake/core/plugin"
)
-// All return all migration scripts
-func All() []plugin.MigrationScript {
- return []plugin.MigrationScript{
- new(initTables),
- new(modifyFileMetaTable),
- new(addDisplayNameFields),
- new(addMissingMetrics),
- new(addS3SliceTable),
- new(addScopeConfigIdToS3Slice),
- new(addScopeIdFields),
+var _ plugin.MigrationScript = (*addAccountIdToS3Slice)(nil)
+
+type addAccountIdToS3Slice struct{}
+
+func (*addAccountIdToS3Slice) Up(basicRes context.BasicRes) errors.Error {
+ db := basicRes.GetDal()
+
+ err := db.Exec(`
+ ALTER TABLE _tool_q_dev_s3_slices
+ ADD COLUMN IF NOT EXISTS account_id VARCHAR(255) DEFAULT NULL
+ `)
+ if err != nil {
+ // Try alternative syntax for databases that don't support IF
NOT EXISTS
+ _ = db.Exec(`ALTER TABLE _tool_q_dev_s3_slices ADD COLUMN
account_id VARCHAR(255) DEFAULT NULL`)
}
+
+ return nil
+}
+
+func (*addAccountIdToS3Slice) Version() uint64 {
+ return 20260220000001
+}
+
+func (*addAccountIdToS3Slice) Name() string {
+ return "add account_id column to _tool_q_dev_s3_slices for
auto-constructing S3 prefixes"
}
diff --git
a/backend/plugins/q_dev/models/migrationscripts/archived/user_report.go
b/backend/plugins/q_dev/models/migrationscripts/archived/user_report.go
new file mode 100644
index 000000000..53bef49b1
--- /dev/null
+++ b/backend/plugins/q_dev/models/migrationscripts/archived/user_report.go
@@ -0,0 +1,46 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package archived
+
+import (
+ "time"
+
+
"github.com/apache/incubator-devlake/core/models/migrationscripts/archived"
+)
+
+type QDevUserReport struct {
+ archived.Model
+ ConnectionId uint64 `gorm:"primaryKey"`
+ UserId string `gorm:"index" json:"userId"`
+ Date time.Time `gorm:"index" json:"date"`
+ DisplayName string `gorm:"type:varchar(255)"
json:"displayName"`
+ ScopeId string `gorm:"index;type:varchar(255)"
json:"scopeId"`
+ ClientType string `gorm:"type:varchar(50)" json:"clientType"`
+ SubscriptionTier string `gorm:"type:varchar(50)"
json:"subscriptionTier"`
+ ProfileId string `gorm:"type:varchar(512)" json:"profileId"`
+ ChatConversations int `json:"chatConversations"`
+ CreditsUsed float64 `json:"creditsUsed"`
+ OverageCap float64 `json:"overageCap"`
+ OverageCreditsUsed float64 `json:"overageCreditsUsed"`
+ OverageEnabled bool `json:"overageEnabled"`
+ TotalMessages int `json:"totalMessages"`
+}
+
+func (QDevUserReport) TableName() string {
+ return "_tool_q_dev_user_report"
+}
diff --git a/backend/plugins/q_dev/models/migrationscripts/register.go
b/backend/plugins/q_dev/models/migrationscripts/register.go
index 86971e539..825ab5658 100644
--- a/backend/plugins/q_dev/models/migrationscripts/register.go
+++ b/backend/plugins/q_dev/models/migrationscripts/register.go
@@ -31,5 +31,7 @@ func All() []plugin.MigrationScript {
new(addS3SliceTable),
new(addScopeConfigIdToS3Slice),
new(addScopeIdFields),
+ new(addUserReportTable),
+ new(addAccountIdToS3Slice),
}
}
diff --git a/backend/plugins/q_dev/models/s3_slice.go
b/backend/plugins/q_dev/models/s3_slice.go
index c844d1634..e918258a9 100644
--- a/backend/plugins/q_dev/models/s3_slice.go
+++ b/backend/plugins/q_dev/models/s3_slice.go
@@ -33,6 +33,7 @@ type QDevS3Slice struct {
Id string `json:"id" mapstructure:"id"
gorm:"primaryKey;type:varchar(512)"`
Prefix string `json:"prefix" mapstructure:"prefix"
gorm:"type:varchar(512);not null"`
BasePath string `json:"basePath" mapstructure:"basePath"
gorm:"type:varchar(512)"`
+ AccountId string `json:"accountId,omitempty"
mapstructure:"accountId" gorm:"type:varchar(255)"`
Year int `json:"year" mapstructure:"year" gorm:"not null"`
Month *int `json:"month,omitempty" mapstructure:"month"`
@@ -61,6 +62,7 @@ func (s *QDevS3Slice) normalize(strict bool) error {
}
s.BasePath = cleanPath(s.BasePath)
+ s.AccountId = strings.TrimSpace(s.AccountId)
s.Prefix = cleanPath(selectNonEmpty(s.Prefix, s.Id))
if s.Year <= 0 {
@@ -81,23 +83,37 @@ func (s *QDevS3Slice) normalize(strict bool) error {
}
}
- if s.Prefix == "" {
- s.Prefix = buildPrefix(s.BasePath, s.Year, s.Month)
- }
+ if s.AccountId != "" {
+ // New-style scope: construct a logical identifier from
component parts
+ s.Prefix = buildPrefixWithAccount(s.BasePath, s.AccountId,
s.Year, s.Month)
+ } else {
+ // Legacy scope: derive prefix from basePath + year + month
+ if s.Prefix == "" {
+ s.Prefix = buildPrefix(s.BasePath, s.Year, s.Month)
+ }
- prefix := buildPrefix(s.BasePath, s.Year, s.Month)
- if prefix != "" {
- s.Prefix = prefix
+ prefix := buildPrefix(s.BasePath, s.Year, s.Month)
+ if prefix != "" {
+ s.Prefix = prefix
+ }
}
if s.Id == "" {
s.Id = s.Prefix
}
- if s.Month != nil {
- s.Name = fmt.Sprintf("%04d-%02d", s.Year, *s.Month)
- } else if s.Year > 0 {
- s.Name = fmt.Sprintf("%04d", s.Year)
+ if s.AccountId != "" {
+ if s.Month != nil {
+ s.Name = fmt.Sprintf("%s %04d-%02d", s.AccountId,
s.Year, *s.Month)
+ } else if s.Year > 0 {
+ s.Name = fmt.Sprintf("%s %04d", s.AccountId, s.Year)
+ }
+ } else {
+ if s.Month != nil {
+ s.Name = fmt.Sprintf("%04d-%02d", s.Year, *s.Month)
+ } else if s.Year > 0 {
+ s.Name = fmt.Sprintf("%04d", s.Year)
+ }
}
if s.FullName == "" {
@@ -150,6 +166,14 @@ func (s QDevS3Slice) ScopeName() string {
if s.Name != "" {
return s.Name
}
+ if s.AccountId != "" {
+ if s.Month != nil {
+ return fmt.Sprintf("%s %04d-%02d", s.AccountId, s.Year,
*s.Month)
+ }
+ if s.Year > 0 {
+ return fmt.Sprintf("%s %04d", s.AccountId, s.Year)
+ }
+ }
if s.Month != nil {
return fmt.Sprintf("%04d-%02d", s.Year, *s.Month)
}
@@ -186,6 +210,20 @@ type QDevS3SliceParams struct {
var _ plugin.ToolLayerScope = (*QDevS3Slice)(nil)
+func buildPrefixWithAccount(basePath string, accountId string, year int, month
*int) string {
+ parts := splitPath(basePath)
+ if accountId != "" {
+ parts = append(parts, accountId)
+ }
+ if year > 0 {
+ parts = append(parts, fmt.Sprintf("%04d", year))
+ }
+ if month != nil {
+ parts = append(parts, fmt.Sprintf("%02d", *month))
+ }
+ return strings.Join(parts, "/")
+}
+
func buildPrefix(basePath string, year int, month *int) string {
parts := splitPath(basePath)
if year > 0 {
diff --git a/backend/plugins/q_dev/models/user_report.go
b/backend/plugins/q_dev/models/user_report.go
new file mode 100644
index 000000000..f64090e89
--- /dev/null
+++ b/backend/plugins/q_dev/models/user_report.go
@@ -0,0 +1,46 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package models
+
+import (
+ "time"
+
+ "github.com/apache/incubator-devlake/core/models/common"
+)
+
+type QDevUserReport struct {
+ common.Model
+ ConnectionId uint64 `gorm:"primaryKey"`
+ UserId string `gorm:"index" json:"userId"`
+ Date time.Time `gorm:"index" json:"date"`
+ DisplayName string `gorm:"type:varchar(255)"
json:"displayName"`
+ ScopeId string `gorm:"index;type:varchar(255)"
json:"scopeId"`
+ ClientType string `gorm:"type:varchar(50)" json:"clientType"`
+ SubscriptionTier string `gorm:"type:varchar(50)"
json:"subscriptionTier"`
+ ProfileId string `gorm:"type:varchar(512)" json:"profileId"`
+ ChatConversations int `json:"chatConversations"`
+ CreditsUsed float64 `json:"creditsUsed"`
+ OverageCap float64 `json:"overageCap"`
+ OverageCreditsUsed float64 `json:"overageCreditsUsed"`
+ OverageEnabled bool `json:"overageEnabled"`
+ TotalMessages int `json:"totalMessages"`
+}
+
+func (QDevUserReport) TableName() string {
+ return "_tool_q_dev_user_report"
+}
diff --git a/backend/plugins/q_dev/tasks/s3_data_extractor.go
b/backend/plugins/q_dev/tasks/s3_data_extractor.go
index 03f8a6f5f..919c4dbe6 100644
--- a/backend/plugins/q_dev/tasks/s3_data_extractor.go
+++ b/backend/plugins/q_dev/tasks/s3_data_extractor.go
@@ -124,6 +124,14 @@ func processCSVData(taskCtx plugin.SubTaskContext, db
dal.Dal, reader io.ReadClo
return errors.Convert(err)
}
+ // Auto-detect CSV format from headers
+ isNewFormat := detectUserReportFormat(headers)
+ if isNewFormat {
+ taskCtx.GetLogger().Debug("Detected new user_report CSV format")
+ } else {
+ taskCtx.GetLogger().Debug("Detected old by_user_analytic CSV
format")
+ }
+
// 逐行读取数据
for {
record, err := csvReader.Read()
@@ -134,22 +142,135 @@ func processCSVData(taskCtx plugin.SubTaskContext, db
dal.Dal, reader io.ReadClo
return errors.Convert(err)
}
- // 创建用户数据对象 (updated to include display name resolution)
- userData, err :=
createUserDataWithDisplayName(taskCtx.GetLogger(), headers, record, fileMeta,
data.IdentityClient)
- if err != nil {
- return errors.Default.Wrap(err, "failed to create user
data")
- }
+ if isNewFormat {
+ reportData, err :=
createUserReportData(taskCtx.GetLogger(), headers, record, fileMeta,
data.IdentityClient)
+ if err != nil {
+ return errors.Default.Wrap(err, "failed to
create user report data")
+ }
+ err = db.Create(reportData)
+ if err != nil {
+ return errors.Default.Wrap(err, "failed to save
user report data")
+ }
+ } else {
+ // 创建用户数据对象 (updated to include display name resolution)
+ userData, err :=
createUserDataWithDisplayName(taskCtx.GetLogger(), headers, record, fileMeta,
data.IdentityClient)
+ if err != nil {
+ return errors.Default.Wrap(err, "failed to
create user data")
+ }
- // Save to database - no need to check for duplicates since
we're processing each file only once
- err = db.Create(userData)
- if err != nil {
- return errors.Default.Wrap(err, "failed to save user
data")
+ // Save to database - no need to check for duplicates
since we're processing each file only once
+ err = db.Create(userData)
+ if err != nil {
+ return errors.Default.Wrap(err, "failed to save
user data")
+ }
}
}
return nil
}
+// detectUserReportFormat checks CSV headers to determine if this is the new
user_report format
+func detectUserReportFormat(headers []string) bool {
+ for _, h := range headers {
+ trimmed := strings.TrimSpace(h)
+ if trimmed == "Client_Type" || trimmed == "Credits_Used" {
+ return true
+ }
+ }
+ return false
+}
+
+// createUserReportData creates a QDevUserReport from a new-format CSV record
+func createUserReportData(logger interface {
+ Debug(format string, a ...interface{})
+}, headers []string, record []string, fileMeta *models.QDevS3FileMeta,
identityClient UserDisplayNameResolver) (*models.QDevUserReport, errors.Error) {
+ report := &models.QDevUserReport{
+ ConnectionId: fileMeta.ConnectionId,
+ ScopeId: fileMeta.ScopeId,
+ }
+
+ // Build field map
+ fieldMap := make(map[string]string)
+ for i, header := range headers {
+ if i < len(record) {
+ logger.Debug("Mapping header[%d]: '%s' -> '%s'", i,
header, record[i])
+ fieldMap[header] = record[i]
+ trimmedHeader := strings.TrimSpace(header)
+ if trimmedHeader != header {
+ logger.Debug("Also adding trimmed header:
'%s'", trimmedHeader)
+ fieldMap[trimmedHeader] = record[i]
+ }
+ }
+ }
+
+ // UserId
+ report.UserId = getStringField(fieldMap, "UserId")
+ if report.UserId == "" {
+ return nil, errors.Default.New("UserId not found in CSV record")
+ }
+
+ // DisplayName
+ report.DisplayName = resolveDisplayName(logger, report.UserId,
identityClient)
+
+ // Date
+ dateStr := getStringField(fieldMap, "Date")
+ if dateStr == "" {
+ return nil, errors.Default.New("Date not found in CSV record")
+ }
+ var err error
+ report.Date, err = parseDate(dateStr)
+ if err != nil {
+ return nil, errors.Default.Wrap(err, "failed to parse date")
+ }
+
+ // String fields
+ report.ClientType = getStringField(fieldMap, "Client_Type")
+ report.SubscriptionTier = getStringField(fieldMap, "Subscription_Tier")
+ report.ProfileId = getStringField(fieldMap, "ProfileId")
+
+ // Numeric fields
+ report.ChatConversations = parseInt(fieldMap, "Chat_Conversations")
+ report.CreditsUsed = parseFloat(fieldMap, "Credits_Used")
+ report.OverageCap = parseFloat(fieldMap, "Overage_Cap")
+ report.OverageCreditsUsed = parseFloat(fieldMap, "Overage_Credits_Used")
+ report.OverageEnabled = parseBool(fieldMap, "Overage_Enabled")
+ report.TotalMessages = parseInt(fieldMap, "Total_Messages")
+
+ return report, nil
+}
+
+// getStringField returns the string value for a field, or empty string if not
found
+func getStringField(fieldMap map[string]string, field string) string {
+ value, ok := fieldMap[field]
+ if !ok {
+ return ""
+ }
+ return value
+}
+
+// parseFloat extracts a float64 from the field map, returning 0 if missing or
invalid
+func parseFloat(fieldMap map[string]string, field string) float64 {
+ value, ok := fieldMap[field]
+ if !ok {
+ return 0
+ }
+ f, err := strconv.ParseFloat(strings.TrimSpace(value), 64)
+ if err != nil {
+ return 0
+ }
+ return f
+}
+
+// parseBool extracts a boolean from the field map, returning false if missing
or invalid
+func parseBool(fieldMap map[string]string, field string) bool {
+ value, ok := fieldMap[field]
+ if !ok {
+ return false
+ }
+ trimmed := strings.TrimSpace(strings.ToLower(value))
+ return trimmed == "true" || trimmed == "1" || trimmed == "yes"
+}
+
// UserDisplayNameResolver interface for resolving user display names
type UserDisplayNameResolver interface {
ResolveUserDisplayName(userId string) (string, error)
diff --git a/backend/plugins/q_dev/tasks/s3_data_extractor_test.go
b/backend/plugins/q_dev/tasks/s3_data_extractor_test.go
index fbea299dc..0a5f808eb 100644
--- a/backend/plugins/q_dev/tasks/s3_data_extractor_test.go
+++ b/backend/plugins/q_dev/tasks/s3_data_extractor_test.go
@@ -419,6 +419,173 @@ func TestParseDate(t *testing.T) {
}
}
+func TestDetectUserReportFormat(t *testing.T) {
+ // New format: contains Client_Type
+ assert.True(t, detectUserReportFormat([]string{"UserId", "Date",
"Client_Type", "Credits_Used"}))
+ // New format: contains Credits_Used
+ assert.True(t, detectUserReportFormat([]string{"UserId", "Date",
"Credits_Used", "Total_Messages"}))
+ // Old format: code-level metrics
+ assert.False(t, detectUserReportFormat([]string{"UserId", "Date",
"Chat_AICodeLines", "Inline_AICodeLines"}))
+ // Old format: no new-format indicators
+ assert.False(t, detectUserReportFormat([]string{"UserId", "Date",
"CodeReview_FindingsCount"}))
+ // Empty headers
+ assert.False(t, detectUserReportFormat([]string{}))
+ // Whitespace-padded header still detected
+ assert.True(t, detectUserReportFormat([]string{"UserId", " Client_Type
", "Date"}))
+}
+
+func TestCreateUserReportData_Success(t *testing.T) {
+ headers := []string{
+ "UserId", "Date", "Client_Type", "Subscription_Tier",
"ProfileId",
+ "Chat_Conversations", "Credits_Used", "Overage_Cap",
"Overage_Credits_Used",
+ "Overage_Enabled", "Total_Messages",
+ }
+ record := []string{
+ "user-abc", "2026-01-15", "KIRO_IDE", "Pro", "profile-xyz",
+ "12", "45.5", "100.0", "5.25",
+ "true", "87",
+ }
+ fileMeta := &models.QDevS3FileMeta{
+ ConnectionId: 1,
+ ScopeId: "scope-1",
+ }
+
+ mockIdentityClient := &MockIdentityClient{}
+ mockIdentityClient.On("ResolveUserDisplayName",
"user-abc").Return("Alice Bob", nil)
+
+ mockLogger := &MockLogger{}
+ mockLogger.On("Debug", mock.Anything, mock.Anything).Return()
+
+ report, err := createUserReportData(mockLogger, headers, record,
fileMeta, mockIdentityClient)
+
+ assert.NoError(t, err)
+ assert.NotNil(t, report)
+ assert.Equal(t, "user-abc", report.UserId)
+ assert.Equal(t, "Alice Bob", report.DisplayName)
+ assert.Equal(t, uint64(1), report.ConnectionId)
+ assert.Equal(t, "scope-1", report.ScopeId)
+ assert.Equal(t, "KIRO_IDE", report.ClientType)
+ assert.Equal(t, "Pro", report.SubscriptionTier)
+ assert.Equal(t, "profile-xyz", report.ProfileId)
+ assert.Equal(t, 12, report.ChatConversations)
+ assert.Equal(t, 45.5, report.CreditsUsed)
+ assert.Equal(t, 100.0, report.OverageCap)
+ assert.Equal(t, 5.25, report.OverageCreditsUsed)
+ assert.True(t, report.OverageEnabled)
+ assert.Equal(t, 87, report.TotalMessages)
+
+ expectedDate, _ := time.Parse("2006-01-02", "2026-01-15")
+ assert.Equal(t, expectedDate, report.Date)
+
+ mockIdentityClient.AssertExpectations(t)
+}
+
+func TestCreateUserReportData_MissingUserId(t *testing.T) {
+ headers := []string{"Date", "Client_Type", "Credits_Used"}
+ record := []string{"2026-01-15", "KIRO_IDE", "10.0"}
+ fileMeta := &models.QDevS3FileMeta{ConnectionId: 1}
+
+ mockLogger := &MockLogger{}
+ mockLogger.On("Debug", mock.Anything, mock.Anything).Return()
+
+ report, err := createUserReportData(mockLogger, headers, record,
fileMeta, nil)
+
+ assert.Error(t, err)
+ assert.Nil(t, report)
+ assert.Contains(t, err.Error(), "UserId not found")
+}
+
+func TestCreateUserReportData_MissingDate(t *testing.T) {
+ headers := []string{"UserId", "Client_Type", "Credits_Used"}
+ record := []string{"user-abc", "KIRO_IDE", "10.0"}
+ fileMeta := &models.QDevS3FileMeta{ConnectionId: 1}
+
+ mockLogger := &MockLogger{}
+ mockLogger.On("Debug", mock.Anything, mock.Anything).Return()
+
+ report, err := createUserReportData(mockLogger, headers, record,
fileMeta, nil)
+
+ assert.Error(t, err)
+ assert.Nil(t, report)
+ assert.Contains(t, err.Error(), "Date not found")
+}
+
+func TestCreateUserReportData_OverageDisabled(t *testing.T) {
+ headers := []string{"UserId", "Date", "Overage_Enabled", "Credits_Used"}
+ record := []string{"user-abc", "2026-01-15", "false", "10.0"}
+ fileMeta := &models.QDevS3FileMeta{ConnectionId: 1}
+
+ mockLogger := &MockLogger{}
+ mockLogger.On("Debug", mock.Anything, mock.Anything).Return()
+
+ report, err := createUserReportData(mockLogger, headers, record,
fileMeta, nil)
+
+ assert.NoError(t, err)
+ assert.False(t, report.OverageEnabled)
+}
+
+func TestCreateUserReportData_InvalidNumericValues(t *testing.T) {
+ headers := []string{"UserId", "Date", "Credits_Used",
"Chat_Conversations", "Total_Messages"}
+ record := []string{"user-abc", "2026-01-15", "not-a-float",
"not-an-int", ""}
+ fileMeta := &models.QDevS3FileMeta{ConnectionId: 1}
+
+ mockLogger := &MockLogger{}
+ mockLogger.On("Debug", mock.Anything, mock.Anything).Return()
+
+ report, err := createUserReportData(mockLogger, headers, record,
fileMeta, nil)
+
+ assert.NoError(t, err)
+ assert.Equal(t, float64(0), report.CreditsUsed)
+ assert.Equal(t, 0, report.ChatConversations)
+ assert.Equal(t, 0, report.TotalMessages)
+}
+
+func TestParseFloat(t *testing.T) {
+ fieldMap := map[string]string{
+ "ValidFloat": "3.14",
+ "ZeroFloat": "0",
+ "NegativeFloat": "-2.5",
+ "IntegerValue": "42",
+ "InvalidFloat": "not-a-number",
+ "EmptyString": "",
+ "Whitespace": " 1.5 ",
+ }
+
+ assert.Equal(t, 3.14, parseFloat(fieldMap, "ValidFloat"))
+ assert.Equal(t, float64(0), parseFloat(fieldMap, "ZeroFloat"))
+ assert.Equal(t, -2.5, parseFloat(fieldMap, "NegativeFloat"))
+ assert.Equal(t, float64(42), parseFloat(fieldMap, "IntegerValue"))
+ assert.Equal(t, float64(0), parseFloat(fieldMap, "InvalidFloat"))
+ assert.Equal(t, float64(0), parseFloat(fieldMap, "EmptyString"))
+ assert.Equal(t, 1.5, parseFloat(fieldMap, "Whitespace"))
+ assert.Equal(t, float64(0), parseFloat(fieldMap, "NonExistentField"))
+}
+
+func TestParseBool(t *testing.T) {
+ fieldMap := map[string]string{
+ "TrueValue": "true",
+ "TrueUpper": "True",
+ "TrueOne": "1",
+ "TrueYes": "yes",
+ "FalseValue": "false",
+ "FalseZero": "0",
+ "EmptyString": "",
+ "InvalidBool": "maybe",
+ "WhitespaceVal": " true ",
+ }
+
+ assert.True(t, parseBool(fieldMap, "TrueValue"))
+ assert.True(t, parseBool(fieldMap, "TrueUpper"))
+ assert.True(t, parseBool(fieldMap, "TrueOne"))
+ assert.True(t, parseBool(fieldMap, "TrueYes"))
+ assert.False(t, parseBool(fieldMap, "FalseValue"))
+ assert.False(t, parseBool(fieldMap, "FalseZero"))
+ assert.False(t, parseBool(fieldMap, "EmptyString"))
+ assert.False(t, parseBool(fieldMap, "InvalidBool"))
+ assert.True(t, parseBool(fieldMap, "WhitespaceVal"))
+ assert.False(t, parseBool(fieldMap, "NonExistentField"))
+}
+
func TestParseInt(t *testing.T) {
fieldMap := map[string]string{
"ValidInt": "42",
diff --git a/backend/plugins/q_dev/tasks/s3_file_collector.go
b/backend/plugins/q_dev/tasks/s3_file_collector.go
index ae88fb97a..9d40919ae 100644
--- a/backend/plugins/q_dev/tasks/s3_file_collector.go
+++ b/backend/plugins/q_dev/tasks/s3_file_collector.go
@@ -30,81 +30,79 @@ import (
var _ plugin.SubTaskEntryPoint = CollectQDevS3Files
-// CollectQDevS3Files 收集S3文件元数据
+// CollectQDevS3Files collects S3 file metadata
func CollectQDevS3Files(taskCtx plugin.SubTaskContext) errors.Error {
data := taskCtx.GetData().(*QDevTaskData)
db := taskCtx.GetDal()
- // 列出指定前缀下的所有对象
- var continuationToken *string
- prefix := data.Options.S3Prefix
- if prefix != "" && !strings.HasSuffix(prefix, "/") {
- prefix = prefix + "/"
- }
-
taskCtx.SetProgress(0, -1)
- for {
- input := &s3.ListObjectsV2Input{
- Bucket: aws.String(data.S3Client.Bucket),
- Prefix: aws.String(prefix),
- ContinuationToken: continuationToken,
+ for _, rawPrefix := range data.S3Prefixes {
+ prefix := rawPrefix
+ if prefix != "" && !strings.HasSuffix(prefix, "/") {
+ prefix = prefix + "/"
}
- result, err := data.S3Client.S3.ListObjectsV2(input)
- if err != nil {
- return errors.Convert(err)
- }
+ taskCtx.GetLogger().Info("Scanning S3 prefix: %s", prefix)
- // 处理每个CSV文件
- for _, object := range result.Contents {
- // Only process CSV files
- if !strings.HasSuffix(*object.Key, ".csv") {
- taskCtx.GetLogger().Debug("Skipping non-CSV
file: %s", *object.Key)
- continue
+ var continuationToken *string
+ for {
+ input := &s3.ListObjectsV2Input{
+ Bucket:
aws.String(data.S3Client.Bucket),
+ Prefix: aws.String(prefix),
+ ContinuationToken: continuationToken,
}
- // Check if this file already exists in our database
- existingFile := &models.QDevS3FileMeta{}
- err = db.First(existingFile, dal.Where("connection_id =
? AND s3_path = ?",
- data.Options.ConnectionId, *object.Key))
+ result, err := data.S3Client.S3.ListObjectsV2(input)
+ if err != nil {
+ return errors.Convert(err)
+ }
- if err == nil {
- // File already exists in database, skip it if
it's already processed
- if existingFile.Processed {
- taskCtx.GetLogger().Debug("Skipping
already processed file: %s", *object.Key)
+ for _, object := range result.Contents {
+ // Only process CSV files
+ if !strings.HasSuffix(*object.Key, ".csv") {
+ taskCtx.GetLogger().Debug("Skipping
non-CSV file: %s", *object.Key)
continue
}
- // Otherwise, we'll keep the existing record
(which is still marked as unprocessed)
- taskCtx.GetLogger().Debug("Found existing
unprocessed file: %s", *object.Key)
- continue
- } else if !db.IsErrorNotFound(err) {
- return errors.Default.Wrap(err, "failed to
query existing file metadata")
- }
- // This is a new file, save its metadata
- fileMeta := &models.QDevS3FileMeta{
- ConnectionId: data.Options.ConnectionId,
- FileName: *object.Key,
- S3Path: *object.Key,
- ScopeId: data.Options.ScopeId,
- Processed: false,
- }
+ // Check if this file already exists in our
database
+ existingFile := &models.QDevS3FileMeta{}
+ err = db.First(existingFile,
dal.Where("connection_id = ? AND s3_path = ?",
+ data.Options.ConnectionId, *object.Key))
+
+ if err == nil {
+ if existingFile.Processed {
+
taskCtx.GetLogger().Debug("Skipping already processed file: %s", *object.Key)
+ continue
+ }
+ taskCtx.GetLogger().Debug("Found
existing unprocessed file: %s", *object.Key)
+ continue
+ } else if !db.IsErrorNotFound(err) {
+ return errors.Default.Wrap(err, "failed
to query existing file metadata")
+ }
- err = db.Create(fileMeta)
- if err != nil {
- return errors.Default.Wrap(err, "failed to
create file metadata")
+ fileMeta := &models.QDevS3FileMeta{
+ ConnectionId: data.Options.ConnectionId,
+ FileName: *object.Key,
+ S3Path: *object.Key,
+ ScopeId: data.Options.ScopeId,
+ Processed: false,
+ }
+
+ err = db.Create(fileMeta)
+ if err != nil {
+ return errors.Default.Wrap(err, "failed
to create file metadata")
+ }
+
+ taskCtx.IncProgress(1)
}
- taskCtx.IncProgress(1)
- }
+ if !*result.IsTruncated {
+ break
+ }
- // 如果没有更多对象,退出循环
- if !*result.IsTruncated {
- break
+ continuationToken = result.NextContinuationToken
}
-
- continuationToken = result.NextContinuationToken
}
return nil
diff --git a/backend/plugins/q_dev/tasks/task_data.go
b/backend/plugins/q_dev/tasks/task_data.go
index 00c58f11e..3fd3c6584 100644
--- a/backend/plugins/q_dev/tasks/task_data.go
+++ b/backend/plugins/q_dev/tasks/task_data.go
@@ -29,12 +29,17 @@ type QDevOptions struct {
ConnectionId uint64 `json:"connectionId"`
S3Prefix string `json:"s3Prefix"`
ScopeId string `json:"scopeId"`
+ AccountId string `json:"accountId"`
+ BasePath string `json:"basePath"`
+ Year int `json:"year"`
+ Month *int `json:"month"`
}
type QDevTaskData struct {
Options *QDevOptions
S3Client *QDevS3Client
- IdentityClient *QDevIdentityClient // New field for Identity Center
client
+ IdentityClient *QDevIdentityClient
+ S3Prefixes []string
}
type QDevS3Client struct {
diff --git a/backend/plugins/q_dev/tasks/task_data_test.go
b/backend/plugins/q_dev/tasks/task_data_test.go
index cb8f75437..757f27428 100644
--- a/backend/plugins/q_dev/tasks/task_data_test.go
+++ b/backend/plugins/q_dev/tasks/task_data_test.go
@@ -38,6 +38,7 @@ func TestQDevTaskData_WithIdentityClient(t *testing.T) {
StoreId: "d-1234567890",
Region: "us-west-2",
},
+ S3Prefixes: []string{"test-prefix/"},
}
assert.NotNil(t, taskData.IdentityClient)
@@ -45,6 +46,7 @@ func TestQDevTaskData_WithIdentityClient(t *testing.T) {
assert.Equal(t, "us-west-2", taskData.IdentityClient.Region)
assert.NotNil(t, taskData.S3Client)
assert.NotNil(t, taskData.Options)
+ assert.Equal(t, []string{"test-prefix/"}, taskData.S3Prefixes)
}
func TestQDevTaskData_WithoutIdentityClient(t *testing.T) {
@@ -68,9 +70,14 @@ func TestQDevTaskData_WithoutIdentityClient(t *testing.T) {
}
func TestQDevTaskData_AllFields(t *testing.T) {
+ month := 3
options := &QDevOptions{
ConnectionId: 123,
S3Prefix: "data/q-dev/",
+ AccountId: "034362076319",
+ BasePath: "user-report",
+ Year: 2026,
+ Month: &month,
}
s3Client := &QDevS3Client{
@@ -87,6 +94,10 @@ func TestQDevTaskData_AllFields(t *testing.T) {
Options: options,
S3Client: s3Client,
IdentityClient: identityClient,
+ S3Prefixes: []string{
+
"user-report/AWSLogs/034362076319/KiroLogs/by_user_analytic/us-east-1/2026/03",
+
"user-report/AWSLogs/034362076319/KiroLogs/user_report/us-east-1/2026/03",
+ },
}
// Verify all fields are properly set
@@ -97,9 +108,14 @@ func TestQDevTaskData_AllFields(t *testing.T) {
// Verify nested field access
assert.Equal(t, uint64(123), taskData.Options.ConnectionId)
assert.Equal(t, "data/q-dev/", taskData.Options.S3Prefix)
+ assert.Equal(t, "034362076319", taskData.Options.AccountId)
+ assert.Equal(t, "user-report", taskData.Options.BasePath)
+ assert.Equal(t, 2026, taskData.Options.Year)
+ assert.Equal(t, &month, taskData.Options.Month)
assert.Equal(t, "my-data-bucket", taskData.S3Client.Bucket)
assert.Equal(t, "d-9876543210", taskData.IdentityClient.StoreId)
assert.Equal(t, "eu-west-1", taskData.IdentityClient.Region)
+ assert.Len(t, taskData.S3Prefixes, 2)
}
func TestQDevTaskData_EmptyStruct(t *testing.T) {
diff --git a/config-ui/src/plugins/register/q-dev/data-scope.tsx
b/config-ui/src/plugins/register/q-dev/data-scope.tsx
index f6aa5a9b0..e576591b8 100644
--- a/config-ui/src/plugins/register/q-dev/data-scope.tsx
+++ b/config-ui/src/plugins/register/q-dev/data-scope.tsx
@@ -26,6 +26,7 @@ interface ScopeData {
year?: number;
month?: number | null;
basePath?: string;
+ accountId?: string;
}
interface ScopeItem {
@@ -46,7 +47,7 @@ const CURRENT_YEAR = new Date().getUTCFullYear();
const MONTHS = Array.from({ length: 12 }, (_, idx) => idx + 1);
const MONTH_LABELS = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug',
'Sep', 'Oct', 'Nov', 'Dec'];
-const DEFAULT_BASE_PATH = 'user-report/AWSLogs';
+const DEFAULT_BASE_PATH = 'user-report';
const ensureLeadingZero = (value: number) => value.toString().padStart(2, '0');
@@ -92,11 +93,14 @@ const extractScopeMeta = (item: ScopeItem) => {
const basePath = normalizeBasePath(data.basePath ?? (baseSegments.length ?
baseSegments.join('/') : ''));
+ const accountId = data.accountId ?? '';
+
return {
basePath,
year: typeof year === 'number' ? year : null,
month,
prefix,
+ accountId,
};
};
@@ -110,12 +114,25 @@ const deriveBasePathFromSelection = (items: ScopeItem[])
=> {
return undefined;
};
-const buildPrefix = (basePath: string, year: number, month: number | null) => {
+const deriveAccountIdFromSelection = (items: ScopeItem[]) => {
+ for (const item of items) {
+ const meta = extractScopeMeta(item);
+ if (meta.accountId) {
+ return meta.accountId;
+ }
+ }
+ return undefined;
+};
+
+const buildPrefix = (basePath: string, year: number, month: number | null,
accountId?: string) => {
const segments = [] as string[];
const sanitizedBase = normalizeBasePath(basePath);
if (sanitizedBase) {
segments.push(sanitizedBase);
}
+ if (accountId) {
+ segments.push(accountId);
+ }
segments.push(String(year));
if (month !== null && month !== undefined) {
segments.push(ensureLeadingZero(month));
@@ -123,13 +140,14 @@ const buildPrefix = (basePath: string, year: number,
month: number | null) => {
return segments.join('/');
};
-const createScopeItem = (basePath: string, year: number, month: number |
null): ScopeItem => {
+const createScopeItem = (basePath: string, year: number, month: number | null,
accountId?: string): ScopeItem => {
const sanitizedBase = normalizeBasePath(basePath);
- const prefix = buildPrefix(sanitizedBase, year, month);
+ const prefix = buildPrefix(sanitizedBase, year, month, accountId);
const isFullYear = month === null;
- const name = isFullYear
+ const timeLabel = isFullYear
? `${year} (Full Year)`
: `${year}-${ensureLeadingZero(month as number)} (${MONTH_LABELS[(month as
number) - 1]})`;
+ const name = accountId ? `${accountId} ${timeLabel}` : timeLabel;
return {
id: prefix,
@@ -137,6 +155,7 @@ const createScopeItem = (basePath: string, year: number,
month: number | null):
fullName: prefix,
data: {
basePath: sanitizedBase,
+ accountId: accountId || undefined,
prefix,
year,
month,
@@ -165,6 +184,7 @@ const MONTH_OPTIONS = MONTHS.map((value) => ({
type FormValues = {
basePath: string;
+ accountId: string;
year: number;
mode: 'year' | 'months';
months?: number[];
@@ -185,20 +205,32 @@ export const QDevDataScope = ({
[selectedItems],
);
+ const derivedAccountId = useMemo(
+ () => deriveAccountIdFromSelection(selectedItems) ?? '',
+ [selectedItems],
+ );
+
useEffect(() => {
if (!form.isFieldsTouched(['basePath'])) {
form.setFieldsValue({ basePath: derivedBasePath });
}
}, [derivedBasePath, form]);
+ useEffect(() => {
+ if (!form.isFieldsTouched(['accountId'])) {
+ form.setFieldsValue({ accountId: derivedAccountId });
+ }
+ }, [derivedAccountId, form]);
+
useEffect(() => {
form.setFieldsValue({ mode: 'year', year: form.getFieldValue('year') ??
CURRENT_YEAR });
}, [form]);
const handleAdd = async () => {
- const { basePath, year, mode, months = [] } = await form.validateFields();
+ const { basePath, accountId, year, mode, months = [] } = await
form.validateFields();
const normalizedBase = normalizeBasePath(basePath ?? '');
+ const normalizedAccountId = (accountId ?? '').trim();
const normalizedYear = Number(year);
if (!normalizedYear || Number.isNaN(normalizedYear)) {
return;
@@ -209,6 +241,7 @@ export const QDevDataScope = ({
const meta = extractScopeMeta(item);
return (
meta.basePath === normalizedBase &&
+ meta.accountId === normalizedAccountId &&
meta.year === normalizedYear &&
(meta.month === null || meta.month === undefined)
);
@@ -223,14 +256,14 @@ export const QDevDataScope = ({
const hasMonths = selectedItems.some((item) => {
const meta = extractScopeMeta(item);
- return meta.basePath === normalizedBase && meta.year ===
normalizedYear && meta.month !== null;
+ return meta.basePath === normalizedBase && meta.accountId ===
normalizedAccountId && meta.year === normalizedYear && meta.month !== null;
});
if (hasMonths) {
return;
}
- const item = createScopeItem(normalizedBase, normalizedYear, null);
+ const item = createScopeItem(normalizedBase, normalizedYear, null,
normalizedAccountId || undefined);
if (!currentIds.has(item.id) && !disabledIds.has(item.id)) {
additions.push(item);
}
@@ -249,7 +282,7 @@ export const QDevDataScope = ({
return;
}
- const item = createScopeItem(normalizedBase, normalizedYear, month);
+ const item = createScopeItem(normalizedBase, normalizedYear, month,
normalizedAccountId || undefined);
if (currentIds.has(item.id) || disabledIds.has(item.id)) {
return;
}
@@ -282,24 +315,34 @@ export const QDevDataScope = ({
render: (_: unknown, item) => formatScopeLabel(item),
},
{
- title: 'S3 Prefix',
+ title: 'Scope Path',
dataIndex: 'id',
key: 'prefix',
render: (_: unknown, item) => {
const meta = extractScopeMeta(item);
+ if (meta.accountId) {
+ const timePart = meta.month
+ ? `${meta.year}/${ensureLeadingZero(meta.month)}`
+ : `${meta.year}`;
+ return (
+ <Tooltip title={`Scans both by_user_analytic and user_report under
AWSLogs/${meta.accountId}/KiroLogs/…/${timePart}`}>
+ <Typography.Text
code>{meta.basePath}/…/{meta.accountId}/…/{timePart}</Typography.Text>
+ </Tooltip>
+ );
+ }
return <Typography.Text code>{meta.prefix}</Typography.Text>;
},
},
{
- title: 'Base Path',
+ title: 'Account ID',
dataIndex: 'id',
- key: 'basePath',
+ key: 'accountId',
render: (_: unknown, item) => {
const meta = extractScopeMeta(item);
- return meta.basePath ? (
- <Typography.Text>{meta.basePath}</Typography.Text>
+ return meta.accountId ? (
+ <Typography.Text>{meta.accountId}</Typography.Text>
) : (
- <Typography.Text type="secondary">(bucket root)</Typography.Text>
+ <Typography.Text type="secondary">—</Typography.Text>
);
},
},
@@ -335,6 +378,7 @@ export const QDevDataScope = ({
layout="inline"
initialValues={{
basePath: derivedBasePath,
+ accountId: derivedAccountId,
year: CURRENT_YEAR,
mode: 'year',
months: [],
@@ -346,9 +390,18 @@ export const QDevDataScope = ({
label="Base Path"
name="basePath"
style={{ flex: 1 }}
- tooltip="Common prefix in S3 between the bucket root and the year
directory"
+ tooltip="S3 prefix before the AWSLogs directory (e.g. 'user-report')"
+ >
+ <Input placeholder="e.g. user-report" />
+ </Form.Item>
+
+ <Form.Item
+ label="AWS Account ID"
+ name="accountId"
+ style={{ width: 200 }}
+ tooltip="AWS Account ID used in the S3 export path. When set, both
by_user_analytic and user_report paths are scanned automatically."
>
- <Input placeholder="user-report/AWSLogs/.../us-east-1" />
+ <Input placeholder="e.g. 034362076319" />
</Form.Item>
<Form.Item label="Year" name="year" rules={[{ required: true, message:
'Enter year' }]} style={{ width: 160 }}>
diff --git a/grafana/dashboards/qdev_user_data.json
b/grafana/dashboards/qdev_user_data.json
index ff55b9ff8..d80d57bab 100644
--- a/grafana/dashboards/qdev_user_data.json
+++ b/grafana/dashboards/qdev_user_data.json
@@ -789,7 +789,7 @@
},
"timepicker": {},
"timezone": "utc",
- "title": "Q Dev User Data Dashboard",
+ "title": "Kiro Code Metrics Dashboard",
"uid": "qdev_user_data",
"version": 1
}
\ No newline at end of file
diff --git a/grafana/dashboards/qdev_user_report.json
b/grafana/dashboards/qdev_user_report.json
new file mode 100644
index 000000000..e1a27bc53
--- /dev/null
+++ b/grafana/dashboards/qdev_user_report.json
@@ -0,0 +1,464 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "fiscalYearStartMonth": 0,
+ "graphTooltip": 0,
+ "id": 101,
+ "links": [],
+ "panels": [
+ {
+ "datasource": "mysql",
+ "description": "Overview of credits and usage metrics",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green"
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 6,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 1,
+ "options": {
+ "colorMode": "value",
+ "graphMode": "area",
+ "justifyMode": "auto",
+ "orientation": "auto",
+ "percentChangeColorMode": "standard",
+ "reduceOptions": {
+ "calcs": [
+ "sum"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "showPercentChange": false,
+ "text": {},
+ "textMode": "auto",
+ "wideLayout": true
+ },
+ "pluginVersion": "11.6.2",
+ "targets": [
+ {
+ "datasource": "mysql",
+ "editorMode": "code",
+ "format": "table",
+ "rawQuery": true,
+ "rawSql": "SELECT\n SUM(credits_used) as 'Total Credits Used',\n
COUNT(DISTINCT user_id) as 'Active Users',\n SUM(total_messages) as 'Total
Messages',\n SUM(chat_conversations) as 'Total Conversations'\nFROM
lake._tool_q_dev_user_report\nWHERE $__timeFilter(date)",
+ "refId": "A"
+ }
+ ],
+ "title": "Overview Stats",
+ "type": "stat"
+ },
+ {
+ "datasource": "mysql",
+ "description": "Daily credits consumed broken down by subscription tier",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisBorderShow": false,
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "barWidthFactor": 0.6,
+ "drawStyle": "line",
+ "fillOpacity": 10,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "smooth",
+ "lineWidth": 2,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "normal"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green"
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 24,
+ "x": 0,
+ "y": 6
+ },
+ "id": 2,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "max",
+ "sum"
+ ],
+ "displayMode": "table",
+ "placement": "right",
+ "showLegend": true
+ },
+ "tooltip": {
+ "hideZeros": false,
+ "mode": "multi",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "11.6.2",
+ "targets": [
+ {
+ "datasource": "mysql",
+ "editorMode": "code",
+ "format": "time_series",
+ "rawQuery": true,
+ "rawSql": "SELECT\n date as time,\n subscription_tier as metric,\n
SUM(credits_used) as value\nFROM lake._tool_q_dev_user_report\nWHERE
$__timeFilter(date)\nGROUP BY date, subscription_tier\nORDER BY date",
+ "refId": "A"
+ }
+ ],
+ "title": "Daily Credits Consumed by Subscription Tier",
+ "type": "timeseries"
+ },
+ {
+ "datasource": "mysql",
+ "description": "Daily messages and conversations broken down by client
type",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "custom": {
+ "axisBorderShow": false,
+ "axisCenteredZero": false,
+ "axisColorMode": "text",
+ "axisLabel": "",
+ "axisPlacement": "auto",
+ "barAlignment": 0,
+ "barWidthFactor": 0.6,
+ "drawStyle": "line",
+ "fillOpacity": 10,
+ "gradientMode": "none",
+ "hideFrom": {
+ "legend": false,
+ "tooltip": false,
+ "viz": false
+ },
+ "insertNulls": false,
+ "lineInterpolation": "smooth",
+ "lineWidth": 2,
+ "pointSize": 5,
+ "scaleDistribution": {
+ "type": "linear"
+ },
+ "showPoints": "never",
+ "spanNulls": true,
+ "stacking": {
+ "group": "A",
+ "mode": "none"
+ },
+ "thresholdsStyle": {
+ "mode": "off"
+ }
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green"
+ }
+ ]
+ },
+ "unit": "short"
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 0,
+ "y": 14
+ },
+ "id": 3,
+ "options": {
+ "legend": {
+ "calcs": [
+ "mean",
+ "max",
+ "sum"
+ ],
+ "displayMode": "table",
+ "placement": "bottom",
+ "showLegend": true
+ },
+ "tooltip": {
+ "hideZeros": false,
+ "mode": "multi",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "11.6.2",
+ "targets": [
+ {
+ "datasource": "mysql",
+ "editorMode": "code",
+ "format": "time_series",
+ "rawQuery": true,
+ "rawSql": "SELECT\n date as time,\n SUM(CASE WHEN client_type =
'KIRO_IDE' THEN total_messages ELSE 0 END) as 'Messages (IDE)',\n SUM(CASE
WHEN client_type = 'KIRO_CLI' THEN total_messages ELSE 0 END) as 'Messages
(CLI)',\n SUM(CASE WHEN client_type = 'PLUGIN' THEN total_messages ELSE 0 END)
as 'Messages (Plugin)',\n SUM(chat_conversations) as 'Conversations'\nFROM
lake._tool_q_dev_user_report\nWHERE $__timeFilter(date)\nGROUP BY date\nORDER
BY date",
+ "refId": "A"
+ }
+ ],
+ "title": "Daily Messages & Conversations",
+ "type": "timeseries"
+ },
+ {
+ "datasource": "mysql",
+ "description": "Distribution of users across subscription tiers",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "palette-classic"
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green"
+ }
+ ]
+ }
+ },
+ "overrides": []
+ },
+ "gridPos": {
+ "h": 8,
+ "w": 12,
+ "x": 12,
+ "y": 14
+ },
+ "id": 4,
+ "options": {
+ "displayLabels": [
+ "name",
+ "percent"
+ ],
+ "legend": {
+ "displayMode": "table",
+ "placement": "right",
+ "showLegend": true,
+ "values": [
+ "value",
+ "percent"
+ ]
+ },
+ "pieType": "pie",
+ "reduceOptions": {
+ "calcs": [
+ "sum"
+ ],
+ "fields": "",
+ "values": false
+ },
+ "tooltip": {
+ "mode": "single",
+ "sort": "none"
+ }
+ },
+ "pluginVersion": "11.6.2",
+ "targets": [
+ {
+ "datasource": "mysql",
+ "editorMode": "code",
+ "format": "table",
+ "rawQuery": true,
+ "rawSql": "SELECT\n subscription_tier as 'Tier',\n COUNT(DISTINCT
user_id) as 'Users'\nFROM lake._tool_q_dev_user_report\nWHERE
$__timeFilter(date)\n AND subscription_tier IS NOT NULL\n AND
subscription_tier != ''\nGROUP BY subscription_tier\nORDER BY COUNT(DISTINCT
user_id) DESC",
+ "refId": "A"
+ }
+ ],
+ "title": "Subscription Tier Distribution",
+ "type": "piechart"
+ },
+ {
+ "datasource": "mysql",
+ "description": "Per-user credits, messages, and subscription details",
+ "fieldConfig": {
+ "defaults": {
+ "color": {
+ "mode": "thresholds"
+ },
+ "custom": {
+ "align": "auto",
+ "cellOptions": {
+ "type": "auto"
+ },
+ "filterable": true,
+ "inspect": false
+ },
+ "mappings": [],
+ "thresholds": {
+ "mode": "absolute",
+ "steps": [
+ {
+ "color": "green"
+ }
+ ]
+ }
+ },
+ "overrides": [
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Credits Used"
+ },
+ "properties": [
+ {
+ "id": "custom.cellOptions",
+ "value": {
+ "mode": "gradient",
+ "type": "gauge"
+ }
+ }
+ ]
+ },
+ {
+ "matcher": {
+ "id": "byName",
+ "options": "Overage"
+ },
+ "properties": [
+ {
+ "id": "custom.cellOptions",
+ "value": {
+ "mode": "basic",
+ "type": "color-background"
+ }
+ },
+ {
+ "id": "mappings",
+ "value": [
+ {
+ "options": {
+ "Yes": {
+ "color": "orange",
+ "index": 0
+ },
+ "No": {
+ "color": "green",
+ "index": 1
+ }
+ },
+ "type": "value"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ "gridPos": {
+ "h": 10,
+ "w": 24,
+ "x": 0,
+ "y": 22
+ },
+ "id": 5,
+ "options": {
+ "cellHeight": "sm",
+ "footer": {
+ "countRows": false,
+ "fields": "",
+ "reducer": [
+ "sum"
+ ],
+ "show": false
+ },
+ "showHeader": true,
+ "sortBy": []
+ },
+ "pluginVersion": "11.6.2",
+ "targets": [
+ {
+ "datasource": "mysql",
+ "editorMode": "code",
+ "format": "table",
+ "rawQuery": true,
+ "rawSql": "SELECT\n COALESCE(display_name, user_id) as 'User',\n
subscription_tier as 'Tier',\n client_type as 'Client',\n SUM(credits_used)
as 'Credits Used',\n SUM(total_messages) as 'Messages',\n
SUM(chat_conversations) as 'Conversations',\n SUM(overage_credits_used) as
'Overage Credits',\n CASE WHEN MAX(CAST(overage_enabled AS UNSIGNED)) = 1 THEN
'Yes' ELSE 'No' END as 'Overage',\n MIN(date) as 'First Activity',\n
MAX(date) as 'Last Activity'\nFROM lake._tool_q_de [...]
+ "refId": "A"
+ }
+ ],
+ "title": "Per-User Credits & Activity",
+ "type": "table"
+ }
+ ],
+ "preload": false,
+ "refresh": "5m",
+ "schemaVersion": 41,
+ "tags": [
+ "q_dev",
+ "user_report",
+ "kiro"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {},
+ "timezone": "utc",
+ "title": "Kiro Usage Dashboard",
+ "uid": "qdev_user_report",
+ "version": 1
+}