This is an automated email from the ASF dual-hosted git repository.

wusheng pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/skywalking-banyandb.git


The following commit(s) were added to refs/heads/main by this push:
     new 05d563e9 Enhance integration tests for spec-based writing (#906)
05d563e9 is described below

commit 05d563e9cbcc21ddd8312e174348de748d5a136d
Author: Huang Youliang <[email protected]>
AuthorDate: Mon Dec 22 09:36:10 2025 +0800

    Enhance integration tests for spec-based writing (#906)
---
 .../measure/testdata/group_stages/sw_spec.json     |  33 +++++
 .../measure/testdata/group_stages/sw_spec2.json    |  33 +++++
 pkg/test/measure/testdata/groups/sw_spec.json      |  19 +++
 pkg/test/measure/testdata/groups/sw_spec2.json     |  19 +++
 .../service_cpm_minute_spec.json                   |  15 ++
 .../service_cpm_minute_spec2.json                  |  15 ++
 pkg/test/measure/testdata/index_rules/id_spec.json |  10 ++
 .../measure/testdata/index_rules/id_spec2.json     |  10 ++
 .../testdata/measures/service_cpm_minute_spec.json |  40 ++++++
 .../measures/service_cpm_minute_spec2.json         |  40 ++++++
 pkg/test/stream/testdata/group.json                |  19 +++
 pkg/test/stream/testdata/group_with_stages.json    |  35 ++++-
 .../testdata/index_rule_bindings/sw_spec2.json     |  26 ++++
 pkg/test/stream/testdata/streams/sw_spec2.json     |  94 +++++++++++++
 .../trace/testdata/groups/test-trace-spec2.json    |  19 +++
 .../testdata/groups_stages/test-trace-spec2.json   |  33 +++++
 .../testdata/index_rule_bindings/sw_spec2.json     |  14 ++
 .../trace/testdata/index_rules/duration_spec2.json |   9 ++
 .../testdata/index_rules/timestamp_spec2.json      |   9 ++
 pkg/test/trace/testdata/traces/sw_spec2.json       |  44 ++++++
 test/cases/init.go                                 |  46 +++---
 test/cases/measure/data/data.go                    | 104 +++++++-------
 test/cases/measure/data/input/write_mixed.ql       |   5 +-
 test/cases/measure/data/input/write_mixed.yaml     |   7 +-
 test/cases/measure/data/input/write_spec.ql        |  22 ---
 test/cases/measure/data/input/write_spec.yaml      |  33 -----
 test/cases/measure/data/want/write_mixed.yaml      |  21 ++-
 test/cases/measure/data/want/write_spec.yaml       |  59 --------
 test/cases/measure/measure.go                      |   1 -
 test/cases/stream/data/data.go                     | 111 ++++++++-------
 .../data/input/{write_spec.ql => write_mixed.ql}   |   4 +-
 .../input/{write_spec.yaml => write_mixed.yaml}    |   5 +-
 .../stream/data/testdata/sw_schema_order.json      |  45 ++++++
 test/cases/stream/data/want/write_mixed.yaml       | 110 +++++++++++++++
 test/cases/stream/data/want/write_spec.yaml        |  65 ---------
 test/cases/stream/stream.go                        |   2 +-
 test/cases/trace/data/data.go                      | 154 ++++++++++-----------
 .../data/input/{write_spec.ql => write_mixed.ql}   |   4 +-
 .../data/input/{write_spec.yml => write_mixed.yml} |   5 +-
 .../cases/trace/data/testdata/sw_schema_order.json | 123 ++++++++++++++++
 test/cases/trace/data/want/write_mixed.yml         | 134 ++++++++++++++++++
 test/cases/trace/data/want/write_spec.yml          |  96 -------------
 test/cases/trace/trace.go                          |   2 +-
 43 files changed, 1203 insertions(+), 491 deletions(-)

diff --git a/pkg/test/measure/testdata/group_stages/sw_spec.json b/pkg/test/measure/testdata/group_stages/sw_spec.json
new file mode 100644
index 00000000..a8f4100f
--- /dev/null
+++ b/pkg/test/measure/testdata/group_stages/sw_spec.json
@@ -0,0 +1,33 @@
+{
+  "metadata": {
+    "name": "sw_spec"
+  },
+  "catalog": "CATALOG_MEASURE",
+  "resource_opts": {
+    "shard_num": 2,
+    "segment_interval": {
+      "unit": "UNIT_DAY",
+      "num": 1
+    },
+    "ttl": {
+      "unit": "UNIT_DAY",
+      "num": 7
+    },
+    "stages": [
+      {
+        "name": "warm",
+        "shard_num": 1,
+        "segment_interval": {
+          "unit": "UNIT_DAY",
+          "num": 3
+        },
+        "ttl": {
+          "unit": "UNIT_DAY",
+          "num": 7
+        },
+        "node_selector": "type=warm"
+      }
+    ]
+  },
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/measure/testdata/group_stages/sw_spec2.json b/pkg/test/measure/testdata/group_stages/sw_spec2.json
new file mode 100644
index 00000000..4c07d93c
--- /dev/null
+++ b/pkg/test/measure/testdata/group_stages/sw_spec2.json
@@ -0,0 +1,33 @@
+{
+  "metadata": {
+    "name": "sw_spec2"
+  },
+  "catalog": "CATALOG_MEASURE",
+  "resource_opts": {
+    "shard_num": 2,
+    "segment_interval": {
+      "unit": "UNIT_DAY",
+      "num": 1
+    },
+    "ttl": {
+      "unit": "UNIT_DAY",
+      "num": 7
+    },
+    "stages": [
+      {
+        "name": "warm",
+        "shard_num": 1,
+        "segment_interval": {
+          "unit": "UNIT_DAY",
+          "num": 3
+        },
+        "ttl": {
+          "unit": "UNIT_DAY",
+          "num": 7
+        },
+        "node_selector": "type=warm"
+      }
+    ]
+  },
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/measure/testdata/groups/sw_spec.json b/pkg/test/measure/testdata/groups/sw_spec.json
new file mode 100644
index 00000000..47d9d404
--- /dev/null
+++ b/pkg/test/measure/testdata/groups/sw_spec.json
@@ -0,0 +1,19 @@
+{
+  "metadata": {
+    "name": "sw_spec"
+  },
+  "catalog": "CATALOG_MEASURE",
+  "resource_opts": {
+    "shard_num": 2,
+    "segment_interval": {
+      "unit": "UNIT_DAY",
+      "num": 1
+    },
+    "ttl": {
+      "unit": "UNIT_DAY",
+      "num": 7
+    },
+    "replicas": 1
+  },
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/measure/testdata/groups/sw_spec2.json b/pkg/test/measure/testdata/groups/sw_spec2.json
new file mode 100644
index 00000000..90473439
--- /dev/null
+++ b/pkg/test/measure/testdata/groups/sw_spec2.json
@@ -0,0 +1,19 @@
+{
+  "metadata": {
+    "name": "sw_spec2"
+  },
+  "catalog": "CATALOG_MEASURE",
+  "resource_opts": {
+    "shard_num": 2,
+    "segment_interval": {
+      "unit": "UNIT_DAY",
+      "num": 1
+    },
+    "ttl": {
+      "unit": "UNIT_DAY",
+      "num": 7
+    },
+    "replicas": 1
+  },
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/measure/testdata/index_rule_bindings/service_cpm_minute_spec.json b/pkg/test/measure/testdata/index_rule_bindings/service_cpm_minute_spec.json
new file mode 100644
index 00000000..3435c645
--- /dev/null
+++ b/pkg/test/measure/testdata/index_rule_bindings/service_cpm_minute_spec.json
@@ -0,0 +1,15 @@
+{
+  "metadata": {
+    "name": "service_cpm_minute",
+    "group": "sw_spec"
+  },
+  "rules": ["id"],
+  "subject": {
+    "catalog": "CATALOG_MEASURE",
+    "name": "service_cpm_minute"
+  },
+  "begin_at": "2021-04-15T01:30:15.01Z",
+  "expire_at": "2121-04-15T01:30:15.01Z",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
+
diff --git a/pkg/test/measure/testdata/index_rule_bindings/service_cpm_minute_spec2.json b/pkg/test/measure/testdata/index_rule_bindings/service_cpm_minute_spec2.json
new file mode 100644
index 00000000..24576d3f
--- /dev/null
+++ b/pkg/test/measure/testdata/index_rule_bindings/service_cpm_minute_spec2.json
@@ -0,0 +1,15 @@
+{
+  "metadata": {
+    "name": "service_cpm_minute",
+    "group": "sw_spec2"
+  },
+  "rules": ["id"],
+  "subject": {
+    "catalog": "CATALOG_MEASURE",
+    "name": "service_cpm_minute"
+  },
+  "begin_at": "2021-04-15T01:30:15.01Z",
+  "expire_at": "2121-04-15T01:30:15.01Z",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
+
diff --git a/pkg/test/measure/testdata/index_rules/id_spec.json b/pkg/test/measure/testdata/index_rules/id_spec.json
new file mode 100644
index 00000000..74743d9e
--- /dev/null
+++ b/pkg/test/measure/testdata/index_rules/id_spec.json
@@ -0,0 +1,10 @@
+{
+  "metadata": {
+    "id": 4,
+    "name": "id",
+    "group": "sw_spec"
+  },
+  "tags": ["id"],
+  "type": "TYPE_INVERTED",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/measure/testdata/index_rules/id_spec2.json b/pkg/test/measure/testdata/index_rules/id_spec2.json
new file mode 100644
index 00000000..0a321105
--- /dev/null
+++ b/pkg/test/measure/testdata/index_rules/id_spec2.json
@@ -0,0 +1,10 @@
+{
+  "metadata": {
+    "id": 4,
+    "name": "id",
+    "group": "sw_spec2"
+  },
+  "tags": ["id"],
+  "type": "TYPE_INVERTED",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/measure/testdata/measures/service_cpm_minute_spec.json b/pkg/test/measure/testdata/measures/service_cpm_minute_spec.json
new file mode 100644
index 00000000..450990cf
--- /dev/null
+++ b/pkg/test/measure/testdata/measures/service_cpm_minute_spec.json
@@ -0,0 +1,40 @@
+{
+  "metadata": {
+    "name": "service_cpm_minute",
+    "group": "sw_spec"
+  },
+  "tag_families": [
+    {
+      "name": "default",
+      "tags": [
+        {
+          "name": "id",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "entity_id",
+          "type": "TAG_TYPE_STRING"
+        }
+      ]
+    }
+  ],
+  "fields": [
+    {
+      "name": "total",
+      "field_type": "FIELD_TYPE_INT",
+      "encoding_method": "ENCODING_METHOD_GORILLA",
+      "compression_method": "COMPRESSION_METHOD_ZSTD"
+    },
+    {
+      "name": "value",
+      "field_type": "FIELD_TYPE_INT",
+      "encoding_method": "ENCODING_METHOD_GORILLA",
+      "compression_method": "COMPRESSION_METHOD_ZSTD"
+    }
+  ],
+  "entity": {
+    "tag_names": ["entity_id"]
+  },
+  "interval": "1m",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/measure/testdata/measures/service_cpm_minute_spec2.json b/pkg/test/measure/testdata/measures/service_cpm_minute_spec2.json
new file mode 100644
index 00000000..d5a8b403
--- /dev/null
+++ b/pkg/test/measure/testdata/measures/service_cpm_minute_spec2.json
@@ -0,0 +1,40 @@
+{
+  "metadata": {
+    "name": "service_cpm_minute",
+    "group": "sw_spec2"
+  },
+  "tag_families": [
+    {
+      "name": "default",
+      "tags": [
+        {
+          "name": "id",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "entity_id",
+          "type": "TAG_TYPE_STRING"
+        }
+      ]
+    }
+  ],
+  "fields": [
+    {
+      "name": "total",
+      "field_type": "FIELD_TYPE_INT",
+      "encoding_method": "ENCODING_METHOD_GORILLA",
+      "compression_method": "COMPRESSION_METHOD_ZSTD"
+    },
+    {
+      "name": "value",
+      "field_type": "FIELD_TYPE_INT",
+      "encoding_method": "ENCODING_METHOD_GORILLA",
+      "compression_method": "COMPRESSION_METHOD_ZSTD"
+    }
+  ],
+  "entity": {
+    "tag_names": ["entity_id"]
+  },
+  "interval": "1m",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/stream/testdata/group.json b/pkg/test/stream/testdata/group.json
index 8e312f1c..2a65bf27 100644
--- a/pkg/test/stream/testdata/group.json
+++ b/pkg/test/stream/testdata/group.json
@@ -37,6 +37,25 @@
     },
     "updated_at": "2021-04-15T01:30:15.01Z"
   },
+  {
+    "metadata": {
+      "name": "default-spec2"
+    },
+    "catalog": "CATALOG_STREAM",
+    "resource_opts": {
+      "shard_num": 2,
+      "replicas": 1,
+      "segment_interval": {
+        "unit": "UNIT_DAY",
+        "num": 1
+      },
+      "ttl": {
+        "unit": "UNIT_DAY",
+        "num": 3
+      }
+    },
+    "updated_at": "2021-04-15T01:30:15.01Z"
+  },
   {
     "metadata": {
       "name": "updated"
diff --git a/pkg/test/stream/testdata/group_with_stages.json b/pkg/test/stream/testdata/group_with_stages.json
index 749bde93..36474e7f 100644
--- a/pkg/test/stream/testdata/group_with_stages.json
+++ b/pkg/test/stream/testdata/group_with_stages.json
@@ -97,5 +97,38 @@
       ]
     },
     "updated_at": "2021-04-15T01:30:15.01Z"
+  },
+  {
+    "metadata": {
+      "name": "default-spec2"
+    },
+    "catalog": "CATALOG_STREAM",
+    "resource_opts": {
+      "shard_num": 2,
+      "segment_interval": {
+        "unit": "UNIT_DAY",
+        "num": 1
+      },
+      "ttl": {
+        "unit": "UNIT_DAY",
+        "num": 3
+      },
+      "stages": [
+        {
+          "name": "warm",
+          "shard_num": 1,
+          "segment_interval": {
+            "unit": "UNIT_DAY",
+            "num": 3
+          },
+          "ttl": {
+            "unit": "UNIT_DAY",
+            "num": 7
+          },
+          "node_selector": "type=warm"
+        }
+      ]
+    },
+    "updated_at": "2021-04-15T01:30:15.01Z"
   }
-]
\ No newline at end of file
+]
diff --git a/pkg/test/stream/testdata/index_rule_bindings/sw_spec2.json b/pkg/test/stream/testdata/index_rule_bindings/sw_spec2.json
new file mode 100644
index 00000000..c645efc9
--- /dev/null
+++ b/pkg/test/stream/testdata/index_rule_bindings/sw_spec2.json
@@ -0,0 +1,26 @@
+{
+  "metadata": {
+    "name": "sw-spec2-index-rule-binding",
+    "group": "default-spec2"
+  },
+  "rules": [
+    "trace_id",
+    "duration",
+    "endpoint_id",
+    "status_code",
+    "http.method",
+    "db.instance",
+    "db.type",
+    "mq.broker",
+    "mq.queue",
+    "mq.topic",
+    "extended_tags"
+  ],
+  "subject": {
+    "catalog": "CATALOG_STREAM",
+    "name": "sw"
+  },
+  "begin_at": "2021-04-15T01:30:15.01Z",
+  "expire_at": "2121-04-15T01:30:15.01Z",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/stream/testdata/streams/sw_spec2.json b/pkg/test/stream/testdata/streams/sw_spec2.json
new file mode 100644
index 00000000..6f4fd30c
--- /dev/null
+++ b/pkg/test/stream/testdata/streams/sw_spec2.json
@@ -0,0 +1,94 @@
+{
+  "metadata": {
+    "name": "sw",
+    "group": "default-spec2"
+  },
+  "tag_families": [
+    {
+      "name": "data",
+      "tags": [
+        {
+          "name": "data_binary",
+          "type": "TAG_TYPE_DATA_BINARY"
+        }
+      ]
+    },
+    {
+      "name": "searchable",
+      "tags": [
+        {
+          "name": "trace_id",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "state",
+          "type": "TAG_TYPE_INT"
+        },
+        {
+          "name": "service_id",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "service_instance_id",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "endpoint_id",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "duration",
+          "type": "TAG_TYPE_INT"
+        },
+        {
+          "name": "start_time",
+          "type": "TAG_TYPE_INT"
+        },
+        {
+          "name": "http.method",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "status_code",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "span_id",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "db.type",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "db.instance",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "mq.queue",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "mq.topic",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "mq.broker",
+          "type": "TAG_TYPE_STRING"
+        },
+        {
+          "name": "extended_tags",
+          "type": "TAG_TYPE_STRING_ARRAY"
+        },
+        {
+          "name": "non_indexed_tags",
+          "type": "TAG_TYPE_STRING_ARRAY"
+        }
+      ]
+    }
+  ],
+  "entity": {
+    "tag_names": ["service_id", "service_instance_id", "state"]
+  },
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/trace/testdata/groups/test-trace-spec2.json b/pkg/test/trace/testdata/groups/test-trace-spec2.json
new file mode 100644
index 00000000..50044286
--- /dev/null
+++ b/pkg/test/trace/testdata/groups/test-trace-spec2.json
@@ -0,0 +1,19 @@
+{
+  "metadata": {
+    "name": "test-trace-spec2"
+  },
+  "catalog": "CATALOG_TRACE",
+  "resource_opts": {
+    "shard_num": 2,
+    "replicas": 0,
+    "segment_interval": {
+      "unit": "UNIT_DAY",
+      "num": 1
+    },
+    "ttl": {
+      "unit": "UNIT_DAY",
+      "num": 3
+    }
+  },
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/trace/testdata/groups_stages/test-trace-spec2.json b/pkg/test/trace/testdata/groups_stages/test-trace-spec2.json
new file mode 100644
index 00000000..49d09315
--- /dev/null
+++ b/pkg/test/trace/testdata/groups_stages/test-trace-spec2.json
@@ -0,0 +1,33 @@
+{
+  "metadata": {
+    "name": "test-trace-spec2"
+  },
+  "catalog": "CATALOG_TRACE",
+  "resource_opts": {
+    "shard_num": 2,
+    "segment_interval": {
+      "unit": "UNIT_DAY",
+      "num": 1
+    },
+    "ttl": {
+      "unit": "UNIT_DAY",
+      "num": 3
+    },
+    "stages": [
+      {
+        "name": "warm",
+        "shard_num": 2,
+        "segment_interval": {
+          "unit": "UNIT_DAY",
+          "num": 3
+        },
+        "ttl": {
+          "unit": "UNIT_DAY",
+          "num": 7
+        },
+        "node_selector": "type=warm"
+      }
+    ]
+  },
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/trace/testdata/index_rule_bindings/sw_spec2.json b/pkg/test/trace/testdata/index_rule_bindings/sw_spec2.json
new file mode 100644
index 00000000..4daee2a9
--- /dev/null
+++ b/pkg/test/trace/testdata/index_rule_bindings/sw_spec2.json
@@ -0,0 +1,14 @@
+{
+  "metadata": {
+    "name": "sw-spec2-index-rule-binding",
+    "group": "test-trace-spec2"
+  },
+  "rules": ["duration", "timestamp"],
+  "subject": {
+    "catalog": "CATALOG_TRACE",
+    "name": "sw"
+  },
+  "begin_at": "2021-04-15T01:30:15.01Z",
+  "expire_at": "2121-04-15T01:30:15.01Z",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/trace/testdata/index_rules/duration_spec2.json b/pkg/test/trace/testdata/index_rules/duration_spec2.json
new file mode 100644
index 00000000..3d3240f3
--- /dev/null
+++ b/pkg/test/trace/testdata/index_rules/duration_spec2.json
@@ -0,0 +1,9 @@
+{
+  "metadata": {
+    "name": "duration",
+    "group": "test-trace-spec2"
+  },
+  "tags": ["service_id", "service_instance_id", "state", "duration"],
+  "type": "TYPE_TREE",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/trace/testdata/index_rules/timestamp_spec2.json b/pkg/test/trace/testdata/index_rules/timestamp_spec2.json
new file mode 100644
index 00000000..45c89630
--- /dev/null
+++ b/pkg/test/trace/testdata/index_rules/timestamp_spec2.json
@@ -0,0 +1,9 @@
+{
+  "metadata": {
+    "name": "timestamp",
+    "group": "test-trace-spec2"
+  },
+  "tags": ["service_id", "service_instance_id", "state", "timestamp"],
+  "type": "TYPE_TREE",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/pkg/test/trace/testdata/traces/sw_spec2.json b/pkg/test/trace/testdata/traces/sw_spec2.json
new file mode 100644
index 00000000..c568dce6
--- /dev/null
+++ b/pkg/test/trace/testdata/traces/sw_spec2.json
@@ -0,0 +1,44 @@
+{
+  "metadata": {
+    "name": "sw",
+    "group": "test-trace-spec2"
+  },
+  "tags": [
+    {
+      "name": "trace_id",
+      "type": "TAG_TYPE_STRING"
+    },
+    {
+      "name": "state",
+      "type": "TAG_TYPE_INT"
+    },
+    {
+      "name": "service_id",
+      "type": "TAG_TYPE_STRING"
+    },
+    {
+      "name": "service_instance_id",
+      "type": "TAG_TYPE_STRING"
+    },
+    {
+      "name": "endpoint_id",
+      "type": "TAG_TYPE_STRING"
+    },
+    {
+      "name": "duration",
+      "type": "TAG_TYPE_INT"
+    },
+    {
+      "name": "span_id",
+      "type": "TAG_TYPE_STRING"
+    },
+    {
+      "name": "timestamp",
+      "type": "TAG_TYPE_TIMESTAMP"
+    }
+  ],
+  "trace_id_tag_name": "trace_id",
+  "span_id_tag_name": "span_id",
+  "timestamp_tag_name": "timestamp",
+  "updated_at": "2021-04-15T01:30:15.01Z"
+}
diff --git a/test/cases/init.go b/test/cases/init.go
index 8cc611e9..55d2b3f0 100644
--- a/test/cases/init.go
+++ b/test/cases/init.go
@@ -25,6 +25,7 @@ import (
        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
 
+       commonv1 "github.com/apache/skywalking-banyandb/api/proto/banyandb/common/v1"
        measurev1 "github.com/apache/skywalking-banyandb/api/proto/banyandb/measure/v1"
        streamv1 "github.com/apache/skywalking-banyandb/api/proto/banyandb/stream/v1"
        tracev1 "github.com/apache/skywalking-banyandb/api/proto/banyandb/trace/v1"
@@ -45,8 +46,12 @@ func Initialize(addr string, now time.Time) {
        casesstreamdata.Write(conn, "sw", now, interval)
        casesstreamdata.Write(conn, "duplicated", now, 0)
        casesstreamdata.WriteToGroup(conn, "sw", "updated", "sw_updated", now.Add(time.Minute), interval)
-       casesstreamdata.WriteWithSpec(conn, "sw", "default-spec", now.Add(2*time.Minute), interval,
-               casesstreamdata.SpecWithData{
+       casesstreamdata.WriteMixed(conn, now.Add(2*time.Minute), interval,
+               casesstreamdata.WriteSpec{
+                       Metadata: &commonv1.Metadata{Name: "sw", Group: "default-spec"},
+                       DataFile: "sw_schema_order.json",
+               },
+               casesstreamdata.WriteSpec{
                        Spec: []*streamv1.TagFamilySpec{
                                {
                                        Name:     "data",
@@ -59,7 +64,8 @@ func Initialize(addr string, now time.Time) {
                        },
                        DataFile: "sw_spec_order.json",
                },
-               casesstreamdata.SpecWithData{
+               casesstreamdata.WriteSpec{
+                       Metadata: &commonv1.Metadata{Name: "sw", Group: "default-spec2"},
                        Spec: []*streamv1.TagFamilySpec{
                                {
                                        Name:     "searchable",
@@ -91,8 +97,12 @@ func Initialize(addr string, now time.Time) {
        casesmeasuredata.Write(conn, "endpoint_traffic", "sw_metric", "endpoint_traffic.json", now, interval)
        casesmeasuredata.Write(conn, "duplicated", "exception", "duplicated.json", now, 0)
        casesmeasuredata.Write(conn, "service_cpm_minute", "sw_updated", "service_cpm_minute_updated_data.json", now.Add(10*time.Minute), interval)
-       casesmeasuredata.WriteWithSpec(conn, "service_cpm_minute", "sw_metric", now.Add(20*time.Minute), interval,
-               casesmeasuredata.SpecWithData{
+       casesmeasuredata.WriteMixed(conn, now.Add(30*time.Minute), interval,
+               casesmeasuredata.WriteSpec{
+                       Metadata: &commonv1.Metadata{Name: "service_cpm_minute", Group: "sw_spec"},
+                       DataFile: "service_cpm_minute_schema_order.json",
+               },
+               casesmeasuredata.WriteSpec{
                        Spec: &measurev1.DataPointSpec{
                                TagFamilySpec: []*measurev1.TagFamilySpec{
                                        {
@@ -104,7 +114,8 @@ func Initialize(addr string, now time.Time) {
                        },
                        DataFile: "service_cpm_minute_spec_order.json",
                },
-               casesmeasuredata.SpecWithData{
+               casesmeasuredata.WriteSpec{
+                       Metadata: &commonv1.Metadata{Name: "service_cpm_minute", Group: "sw_spec2"},
                        Spec: &measurev1.DataPointSpec{
                                TagFamilySpec: []*measurev1.TagFamilySpec{
                                        {
@@ -116,18 +127,6 @@ func Initialize(addr string, now time.Time) {
                        },
                        DataFile: "service_cpm_minute_spec_order2.json",
                })
-       casesmeasuredata.WriteMixed(conn, "service_cpm_minute", "sw_metric",
-               "service_cpm_minute_schema_order.json", "service_cpm_minute_spec_order.json",
-               now.Add(30*time.Minute), interval, 2*time.Minute,
-               &measurev1.DataPointSpec{
-                       TagFamilySpec: []*measurev1.TagFamilySpec{
-                               {
-                                       Name:     "default",
-                                       TagNames: []string{"entity_id", "id"},
-                               },
-                       },
-                       FieldNames: []string{"value", "total"},
-               })
        time.Sleep(5 * time.Second)
        // trace
        interval = 500 * time.Millisecond
@@ -136,14 +135,19 @@ func Initialize(addr string, now time.Time) {
        casestrace.WriteToGroup(conn, "sw", "test-trace-updated", "sw_updated", now.Add(time.Minute), interval)
        time.Sleep(5 * time.Second)
        casestrace.WriteToGroup(conn, "sw", "test-trace-group", "sw_mixed_traces", now.Add(time.Minute), interval)
-       casestrace.WriteWithSpec(conn, "sw", "test-trace-spec", now.Add(2*time.Minute), interval,
-               casestrace.SpecWithData{
+       casestrace.WriteMixed(conn, now.Add(2*time.Minute), interval,
+               casestrace.WriteSpec{
+                       Metadata: &commonv1.Metadata{Name: "sw", Group: "test-trace-spec"},
+                       DataFile: "sw_schema_order.json",
+               },
+               casestrace.WriteSpec{
                        Spec: &tracev1.TagSpec{
                                TagNames: []string{"trace_id", "state", 
"service_id", "service_instance_id", "endpoint_id", "duration", "span_id", 
"timestamp"},
                        },
                        DataFile: "sw_spec_order.json",
                },
-               casestrace.SpecWithData{
+               casestrace.WriteSpec{
+                       Metadata: &commonv1.Metadata{Name: "sw", Group: "test-trace-spec2"},
                        Spec: &tracev1.TagSpec{
                                TagNames: []string{"span_id", "duration", 
"endpoint_id", "service_instance_id", "service_id", "state", "trace_id", 
"timestamp"},
                        },
diff --git a/test/cases/measure/data/data.go b/test/cases/measure/data/data.go
index 49f666b2..98ddbf98 100644
--- a/test/cases/measure/data/data.go
+++ b/test/cases/measure/data/data.go
@@ -336,37 +336,36 @@ func WriteOnly(conn *grpclib.ClientConn, name, group, dataFile string,
        return writeClient
 }
 
-// SpecWithData pairs a DataPointSpec with a data file.
-type SpecWithData struct {
+// WriteSpec defines the specification for writing measure data.
+type WriteSpec struct {
+       Metadata *commonv1.Metadata
        Spec     *measurev1.DataPointSpec
        DataFile string
 }
 
 // WriteWithSpec writes data using multiple data_point_specs to specify tag and field names.
-func WriteWithSpec(conn *grpclib.ClientConn, name, group string,
-       baseTime time.Time, interval time.Duration, specDataPairs ...SpecWithData,
-) {
+func WriteWithSpec(conn *grpclib.ClientConn, baseTime time.Time, interval time.Duration, writeSpecs ...WriteSpec) {
        ctx := context.Background()
-       md := &commonv1.Metadata{
-               Name:  name,
-               Group: group,
-       }
-
        schemaClient := databasev1.NewMeasureRegistryServiceClient(conn)
-       resp, err := schemaClient.Get(ctx, &databasev1.MeasureRegistryServiceGetRequest{Metadata: md})
-       gm.Expect(err).NotTo(gm.HaveOccurred())
-       md = resp.GetMeasure().GetMetadata()
-
        c := measurev1.NewMeasureServiceClient(conn)
        writeClient, err := c.Write(ctx)
        gm.Expect(err).NotTo(gm.HaveOccurred())
 
-       isFirstRequest := true
+       var currentMd *commonv1.Metadata
        currentTime := baseTime
-       for _, pair := range specDataPairs {
+       for _, ws := range writeSpecs {
+               needMetadataUpdate := false
+               if ws.Metadata != nil {
+                       resp, getErr := schemaClient.Get(ctx, &databasev1.MeasureRegistryServiceGetRequest{Metadata: ws.Metadata})
+                       gm.Expect(getErr).NotTo(gm.HaveOccurred())
+                       currentMd = resp.GetMeasure().GetMetadata()
+                       needMetadataUpdate = true
+               }
+               gm.Expect(currentMd).NotTo(gm.BeNil(), "first WriteSpec must have Metadata")
+
                var templates []interface{}
-               content, err := dataFS.ReadFile("testdata/" + pair.DataFile)
-               gm.Expect(err).ShouldNot(gm.HaveOccurred())
+               content, readErr := dataFS.ReadFile("testdata/" + ws.DataFile)
+               gm.Expect(readErr).ShouldNot(gm.HaveOccurred())
                gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
 
                isFirstForSpec := true
@@ -376,16 +375,14 @@ func WriteWithSpec(conn *grpclib.ClientConn, name, group string,
                        dataPointValue := &measurev1.DataPointValue{}
                        gm.Expect(protojson.Unmarshal(rawDataPointValue, dataPointValue)).ShouldNot(gm.HaveOccurred())
                        dataPointValue.Timestamp = timestamppb.New(currentTime.Add(time.Duration(i) * interval))
-                       req := &measurev1.WriteRequest{
-                               DataPoint: dataPointValue,
-                               MessageId: uint64(time.Now().UnixNano()),
-                       }
-                       if isFirstRequest {
-                               req.Metadata = md
-                               isFirstRequest = false
-                       }
+                       req := &measurev1.WriteRequest{DataPoint: dataPointValue, MessageId: uint64(time.Now().UnixNano())}
                        if isFirstForSpec {
-                               req.DataPointSpec = pair.Spec
+                               if ws.Spec != nil {
+                                       req.DataPointSpec = ws.Spec
+                               }
+                               if needMetadataUpdate {
+                                       req.Metadata = currentMd
+                               }
                                isFirstForSpec = false
                        }
                        gm.Expect(writeClient.Send(req)).Should(gm.Succeed())
@@ -395,38 +392,49 @@ func WriteWithSpec(conn *grpclib.ClientConn, name, group string,
 
        gm.Expect(writeClient.CloseSend()).To(gm.Succeed())
        gm.Eventually(func() error {
-               _, err := writeClient.Recv()
-               return err
+               _, recvErr := writeClient.Recv()
+               return recvErr
        }, flags.EventuallyTimeout).Should(gm.Equal(io.EOF))
 }
 
-// WriteMixed writes data in mixed mode: first following the schema order and then switching to spec mode.
-func WriteMixed(conn *grpclib.ClientConn, name, group string,
-       schemaDataFile, specDataFile string,
-       baseTime time.Time, interval time.Duration,
-       specStartOffset time.Duration, spec *measurev1.DataPointSpec,
-) {
+// WriteMixed writes measure data in schema order first, and then in spec order.
+func WriteMixed(conn *grpclib.ClientConn, baseTime time.Time, interval time.Duration, writeSpecs ...WriteSpec) {
        ctx := context.Background()
-       metadata := &commonv1.Metadata{
-               Name:  name,
-               Group: group,
-       }
-
        schemaClient := databasev1.NewMeasureRegistryServiceClient(conn)
-       resp, err := schemaClient.Get(ctx, &databasev1.MeasureRegistryServiceGetRequest{Metadata: metadata})
-       gm.Expect(err).NotTo(gm.HaveOccurred())
-       metadata = resp.GetMeasure().GetMetadata()
-
        c := measurev1.NewMeasureServiceClient(conn)
        writeClient, err := c.Write(ctx)
        gm.Expect(err).NotTo(gm.HaveOccurred())
 
-       loadData(metadata, writeClient, schemaDataFile, baseTime, interval)
-       loadDataWithSpec(nil, writeClient, specDataFile, baseTime.Add(specStartOffset), interval, spec)
+       var currentMd *commonv1.Metadata
+       currentTime := baseTime
+       for idx, ws := range writeSpecs {
+               if ws.Metadata != nil {
+                       resp, getErr := schemaClient.Get(ctx, &databasev1.MeasureRegistryServiceGetRequest{Metadata: ws.Metadata})
+                       gm.Expect(getErr).NotTo(gm.HaveOccurred())
+                       currentMd = resp.GetMeasure().GetMetadata()
+               }
+               gm.Expect(currentMd).NotTo(gm.BeNil(), "first WriteSpec must have Metadata")
+
+               var templates []interface{}
+               content, readErr := dataFS.ReadFile("testdata/" + ws.DataFile)
+               gm.Expect(readErr).ShouldNot(gm.HaveOccurred())
+               gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
+
+               if idx == 0 {
+                       loadData(currentMd, writeClient, ws.DataFile, currentTime.Add(time.Duration(len(templates)-1)*interval), interval)
+               } else {
+                       var mdToSend *commonv1.Metadata
+                       if ws.Metadata != nil {
+                               mdToSend = currentMd
+                       }
+                       loadDataWithSpec(mdToSend, writeClient, ws.DataFile, currentTime.Add(time.Duration(len(templates)-1)*interval), interval, ws.Spec)
+               }
+               currentTime = currentTime.Add(time.Duration(len(templates)) * interval)
+       }
 
        gm.Expect(writeClient.CloseSend()).To(gm.Succeed())
        gm.Eventually(func() error {
-               _, err := writeClient.Recv()
-               return err
+               _, recvErr := writeClient.Recv()
+               return recvErr
        }, flags.EventuallyTimeout).Should(gm.Equal(io.EOF))
 }
diff --git a/test/cases/measure/data/input/write_mixed.ql b/test/cases/measure/data/input/write_mixed.ql
index 0382cb8b..4d602d25 100644
--- a/test/cases/measure/data/input/write_mixed.ql
+++ b/test/cases/measure/data/input/write_mixed.ql
@@ -16,7 +16,6 @@
 # under the License.
 
 
-SELECT id, entity_id, total, value FROM MEASURE service_cpm_minute IN sw_metric
+SELECT id, entity_id, total, value FROM MEASURE service_cpm_minute IN sw_spec, sw_spec2
 TIME > '-15m'
-WHERE id IN ('id_schema_1', 'id_schema_2', 'id_schema_3', 'id_spec_1', 'id_spec_2', 'id_spec_3')
-
+WHERE id IN ('id_schema_1', 'id_schema_2', 'id_schema_3', 'id_spec_1', 'id_spec_2', 'id_spec_3', 'id_spec_5')
diff --git a/test/cases/measure/data/input/write_mixed.yaml b/test/cases/measure/data/input/write_mixed.yaml
index aefba1bb..991cda2b 100644
--- a/test/cases/measure/data/input/write_mixed.yaml
+++ b/test/cases/measure/data/input/write_mixed.yaml
@@ -16,11 +16,11 @@
 # under the License.
 
 name: "service_cpm_minute"
-groups: ["sw_metric"]
+groups: ["sw_spec", "sw_spec2"]
 tagProjection:
   tagFamilies:
-  - name: "default"
-    tags: ["id", "entity_id"]
+    - name: "default"
+      tags: ["id", "entity_id"]
 fieldProjection:
   names: ["total", "value"]
 criteria:
@@ -37,5 +37,6 @@ criteria:
             "id_spec_1",
             "id_spec_2",
             "id_spec_3",
+            "id_spec_5",
           ]
 
diff --git a/test/cases/measure/data/input/write_spec.ql b/test/cases/measure/data/input/write_spec.ql
deleted file mode 100644
index d3111de7..00000000
--- a/test/cases/measure/data/input/write_spec.ql
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to Apache Software Foundation (ASF) under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Apache Software Foundation (ASF) licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-
-SELECT id, entity_id, total, value FROM MEASURE service_cpm_minute IN sw_metric
-TIME > '-15m'
-WHERE id IN ('id_spec_2', 'id_spec_5')
-
diff --git a/test/cases/measure/data/input/write_spec.yaml b/test/cases/measure/data/input/write_spec.yaml
deleted file mode 100644
index afd48e2b..00000000
--- a/test/cases/measure/data/input/write_spec.yaml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to Apache Software Foundation (ASF) under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Apache Software Foundation (ASF) licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: "service_cpm_minute"
-groups: ["sw_metric"]
-tagProjection:
-  tagFamilies:
-    - name: "default"
-      tags: ["id", "entity_id"]
-fieldProjection:
-  names: ["total", "value"]
-criteria:
-  condition:
-    name: "id"
-    op: "BINARY_OP_IN"
-    value:
-      strArray:
-        value: ["id_spec_2", "id_spec_5"]
-
diff --git a/test/cases/measure/data/want/write_mixed.yaml b/test/cases/measure/data/want/write_mixed.yaml
index b8f734fa..9dc6b7e6 100644
--- a/test/cases/measure/data/want/write_mixed.yaml
+++ b/test/cases/measure/data/want/write_mixed.yaml
@@ -36,6 +36,26 @@ dataPoints:
         value:
           int:
             value: "200"
+  - tagFamilies:
+      - name: default
+        tags:
+          - key: id
+            value:
+              str:
+                value: id_spec_5
+          - key: entity_id
+            value:
+              str:
+                value: entity_spec_5
+    fields:
+      - name: total
+        value:
+          int:
+            value: "50"
+      - name: value
+        value:
+          int:
+            value: "500"
   - tagFamilies:
       - name: default
         tags:
@@ -136,4 +156,3 @@ dataPoints:
         value:
           int:
             value: "100"
-
diff --git a/test/cases/measure/data/want/write_spec.yaml b/test/cases/measure/data/want/write_spec.yaml
deleted file mode 100644
index 974802c4..00000000
--- a/test/cases/measure/data/want/write_spec.yaml
+++ /dev/null
@@ -1,59 +0,0 @@
-# Licensed to Apache Software Foundation (ASF) under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Apache Software Foundation (ASF) licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-dataPoints:
-  - tagFamilies:
-      - name: default
-        tags:
-          - key: id
-            value:
-              str:
-                value: id_spec_2
-          - key: entity_id
-            value:
-              str:
-                value: entity_spec_2
-    fields:
-      - name: total
-        value:
-          int:
-            value: "20"
-      - name: value
-        value:
-          int:
-            value: "200"
-  - tagFamilies:
-      - name: default
-        tags:
-          - key: id
-            value:
-              str:
-                value: id_spec_5
-          - key: entity_id
-            value:
-              str:
-                value: entity_spec_5
-    fields:
-      - name: total
-        value:
-          int:
-            value: "50"
-      - name: value
-        value:
-          int:
-            value: "500"
-
diff --git a/test/cases/measure/measure.go b/test/cases/measure/measure.go
index ae9c846b..c680912d 100644
--- a/test/cases/measure/measure.go
+++ b/test/cases/measure/measure.go
@@ -89,6 +89,5 @@ var _ = g.DescribeTable("Scanning Measures", verify,
        g.Entry("multi groups: new tag and fields", helpers.Args{Input: 
"multi_group_new_tag_field", Duration: 35 * time.Minute, Offset: -20 * 
time.Minute}),
        g.Entry("filter by non-existent tag", helpers.Args{Input: 
"filter_non_existent_tag", Duration: 25 * time.Minute, Offset: -20 * 
time.Minute, WantErr: true}),
        g.Entry("project non-existent tag", helpers.Args{Input: 
"project_non_existent_tag", Duration: 25 * time.Minute, Offset: -20 * 
time.Minute, WantErr: true}),
-       g.Entry("write spec", helpers.Args{Input: "write_spec", Duration: 15 * 
time.Minute, Offset: 15 * time.Minute, DisOrder: true}),
        g.Entry("write mixed", helpers.Args{Input: "write_mixed", Duration: 15 
* time.Minute, Offset: 25 * time.Minute, DisOrder: true}),
 )
diff --git a/test/cases/stream/data/data.go b/test/cases/stream/data/data.go
index 64c96985..a53b78bf 100644
--- a/test/cases/stream/data/data.go
+++ b/test/cases/stream/data/data.go
@@ -136,21 +136,24 @@ var VerifyFn = func(innerGm gm.Gomega, sharedContext helpers.SharedContext, args
        innerGm.Expect(resp.Trace.GetSpans()).NotTo(gm.BeEmpty())
 }
 
-func loadData(stream streamv1.StreamService_WriteClient, metadata *commonv1.Metadata, dataFile string, baseTime time.Time, interval time.Duration) {
+func loadData(stream streamv1.StreamService_WriteClient, metadata *commonv1.Metadata, dataFile string, baseTime time.Time, interval time.Duration, elementCounter *int) {
        var templates []interface{}
        content, err := dataFS.ReadFile("testdata/" + dataFile)
        gm.Expect(err).ShouldNot(gm.HaveOccurred())
        gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
+       gm.Expect(elementCounter).NotTo(gm.BeNil(), "elementCounter must be provided")
        bb, _ := base64.StdEncoding.DecodeString("YWJjMTIzIT8kKiYoKSctPUB+")
 
-       for i, template := range templates {
+       for _, template := range templates {
                rawSearchTagFamily, errMarshal := json.Marshal(template)
                gm.Expect(errMarshal).ShouldNot(gm.HaveOccurred())
                searchTagFamily := &modelv1.TagFamilyForWrite{}
                gm.Expect(protojson.Unmarshal(rawSearchTagFamily, searchTagFamily)).ShouldNot(gm.HaveOccurred())
+               elementID := *elementCounter
+               *elementCounter++
                e := &streamv1.ElementValue{
-                       ElementId: strconv.Itoa(i),
-                       Timestamp: timestamppb.New(baseTime.Add(interval * time.Duration(i))),
+                       ElementId: strconv.Itoa(elementID),
+                       Timestamp: timestamppb.New(baseTime.Add(interval * time.Duration(elementID))),
                        TagFamilies: []*modelv1.TagFamilyForWrite{
                                {
                                        Tags: []*modelv1.TagValue{
@@ -173,6 +176,35 @@ func loadData(stream streamv1.StreamService_WriteClient, metadata *commonv1.Meta
        }
 }
 
+func loadDataWithSpec(stream streamv1.StreamService_WriteClient, md *commonv1.Metadata, dataFile string,
+       baseTime time.Time, interval time.Duration, spec []*streamv1.TagFamilySpec, elementCounter *int,
+) {
+       var templates []interface{}
+       content, err := dataFS.ReadFile("testdata/" + dataFile)
+       gm.Expect(err).ShouldNot(gm.HaveOccurred())
+       gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
+       gm.Expect(elementCounter).NotTo(gm.BeNil(), "elementCounter must be provided")
+
+       isFirst := true
+       for _, template := range templates {
+               rawElementValue, errMarshal := json.Marshal(template)
+               gm.Expect(errMarshal).ShouldNot(gm.HaveOccurred())
+               elementValue := &streamv1.ElementValue{}
+               gm.Expect(protojson.Unmarshal(rawElementValue, elementValue)).ShouldNot(gm.HaveOccurred())
+               elementID := *elementCounter
+               *elementCounter++
+               elementValue.ElementId = strconv.Itoa(elementID)
+               elementValue.Timestamp = timestamppb.New(baseTime.Add(time.Duration(elementID) * interval))
+               req := &streamv1.WriteRequest{Element: elementValue, MessageId: uint64(time.Now().UnixNano())}
+               if isFirst {
+                       req.Metadata = md
+                       req.TagFamilySpec = spec
+                       isFirst = false
+               }
+               gm.Expect(stream.Send(req)).Should(gm.Succeed())
+       }
+}
+
 // verifyQLWithRequest verifies that the QL file produces an equivalent QueryRequest to the YAML.
 func verifyQLWithRequest(innerGm gm.Gomega, args helpers.Args, yamlQuery *streamv1.QueryRequest, conn *grpclib.ClientConn) {
        qlContent, err := qlFS.ReadFile("input/" + args.Input + ".ql")
@@ -270,7 +302,8 @@ func WriteToGroup(conn *grpclib.ClientConn, name, group, fileName string, baseTi
        ctx := context.Background()
        writeClient, err := c.Write(ctx)
        gm.Expect(err).NotTo(gm.HaveOccurred())
-       loadData(writeClient, metadata, fmt.Sprintf("%s.json", fileName), baseTime, interval)
+       elementCounter := 0
+       loadData(writeClient, metadata, fmt.Sprintf("%s.json", fileName), baseTime, interval, &elementCounter)
        gm.Expect(writeClient.CloseSend()).To(gm.Succeed())
        gm.Eventually(func() error {
                _, err := writeClient.Recv()
@@ -278,70 +311,52 @@ func WriteToGroup(conn *grpclib.ClientConn, name, group, fileName string, baseTi
        }, flags.EventuallyTimeout).Should(gm.Equal(io.EOF))
 }
 
-// SpecWithData pairs a TagFamilySpec with a data file.
-type SpecWithData struct {
+// WriteSpec defines the specification for writing stream data.
+type WriteSpec struct {
+       Metadata *commonv1.Metadata
        DataFile string
        Spec     []*streamv1.TagFamilySpec
 }
 
-// WriteWithSpec writes stream data using multiple tag_family_specs to specify tag names.
-func WriteWithSpec(conn *grpclib.ClientConn, name, group string,
-       baseTime time.Time, interval time.Duration, specDataPairs ...SpecWithData,
-) {
+// WriteMixed writes stream data in schema order first, and then in spec order.
+func WriteMixed(conn *grpclib.ClientConn, baseTime time.Time, interval time.Duration, writeSpecs ...WriteSpec) {
        ctx := context.Background()
-       md := &commonv1.Metadata{
-               Name:  name,
-               Group: group,
-       }
-
        schemaClient := databasev1.NewStreamRegistryServiceClient(conn)
-       resp, err := schemaClient.Get(ctx, &databasev1.StreamRegistryServiceGetRequest{Metadata: md})
-       gm.Expect(err).NotTo(gm.HaveOccurred())
-       md = resp.GetStream().GetMetadata()
-
        c := streamv1.NewStreamServiceClient(conn)
        writeClient, err := c.Write(ctx)
        gm.Expect(err).NotTo(gm.HaveOccurred())
 
-       isFirstRequest := true
+       var currentMd *commonv1.Metadata
        elementCounter := 0
        currentTime := baseTime
-       for _, pair := range specDataPairs {
+       for idx, ws := range writeSpecs {
+               if ws.Metadata != nil {
+                       resp, getErr := schemaClient.Get(ctx, &databasev1.StreamRegistryServiceGetRequest{Metadata: ws.Metadata})
+                       gm.Expect(getErr).NotTo(gm.HaveOccurred())
+                       currentMd = resp.GetStream().GetMetadata()
+               }
+               gm.Expect(currentMd).NotTo(gm.BeNil(), "first WriteSpec must have Metadata")
+
                var templates []interface{}
-               content, err := dataFS.ReadFile("testdata/" + pair.DataFile)
-               gm.Expect(err).ShouldNot(gm.HaveOccurred())
+               content, readErr := dataFS.ReadFile("testdata/" + ws.DataFile)
+               gm.Expect(readErr).ShouldNot(gm.HaveOccurred())
                gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
 
-               isFirstForSpec := true
-               for i, template := range templates {
-                       rawElementValue, errMarshal := json.Marshal(template)
-                       gm.Expect(errMarshal).ShouldNot(gm.HaveOccurred())
-                       elementValue := &streamv1.ElementValue{}
-                       gm.Expect(protojson.Unmarshal(rawElementValue, elementValue)).ShouldNot(gm.HaveOccurred())
-                       elementValue.ElementId = strconv.Itoa(elementCounter)
-                       elementValue.Timestamp = timestamppb.New(currentTime.Add(time.Duration(i) * interval))
-
-                       req := &streamv1.WriteRequest{
-                               Element:   elementValue,
-                               MessageId: uint64(time.Now().UnixNano()),
-                       }
-                       if isFirstRequest {
-                               req.Metadata = md
-                               isFirstRequest = false
+               if idx == 0 {
+                       loadData(writeClient, currentMd, ws.DataFile, currentTime, interval, &elementCounter)
+               } else {
+                       var mdToSend *commonv1.Metadata
+                       if ws.Metadata != nil {
+                               mdToSend = currentMd
                        }
-                       if isFirstForSpec {
-                               req.TagFamilySpec = pair.Spec
-                               isFirstForSpec = false
-                       }
-                       gm.Expect(writeClient.Send(req)).Should(gm.Succeed())
-                       elementCounter++
+                       loadDataWithSpec(writeClient, mdToSend, ws.DataFile, currentTime, interval, ws.Spec, &elementCounter)
                }
                currentTime = currentTime.Add(time.Duration(len(templates)) * interval)
        }
 
        gm.Expect(writeClient.CloseSend()).To(gm.Succeed())
        gm.Eventually(func() error {
-               _, err := writeClient.Recv()
-               return err
+               _, recvErr := writeClient.Recv()
+               return recvErr
        }, flags.EventuallyTimeout).Should(gm.Equal(io.EOF))
 }
diff --git a/test/cases/stream/data/input/write_spec.ql b/test/cases/stream/data/input/write_mixed.ql
similarity index 87%
rename from test/cases/stream/data/input/write_spec.ql
rename to test/cases/stream/data/input/write_mixed.ql
index f057350a..a88bc60f 100644
--- a/test/cases/stream/data/input/write_spec.ql
+++ b/test/cases/stream/data/input/write_mixed.ql
@@ -16,6 +16,6 @@
 # under the License.
 
 
-SELECT trace_id, state, duration, status_code, span_id FROM STREAM sw IN default-spec
+SELECT trace_id, state, duration, status_code, span_id FROM STREAM sw IN default-spec, default-spec2
 TIME > '-15m'
-WHERE trace_id IN ('spec_trace_2', 'spec_trace_5')
+WHERE trace_id IN ('schema_trace_1', 'schema_trace_3', 'spec_trace_2', 'spec_trace_5')
diff --git a/test/cases/stream/data/input/write_spec.yaml b/test/cases/stream/data/input/write_mixed.yaml
similarity index 89%
rename from test/cases/stream/data/input/write_spec.yaml
rename to test/cases/stream/data/input/write_mixed.yaml
index 00fd8854..331a3e5d 100644
--- a/test/cases/stream/data/input/write_spec.yaml
+++ b/test/cases/stream/data/input/write_mixed.yaml
@@ -16,7 +16,7 @@
 # under the License.
 
 name: "sw"
-groups: ["default-spec"]
+groups: ["default-spec", "default-spec2"]
 projection:
   tagFamilies:
     - name: "searchable"
@@ -27,5 +27,4 @@ criteria:
     op: "BINARY_OP_IN"
     value:
       strArray:
-        value: ["spec_trace_2", "spec_trace_5"]
-
+        value: ["schema_trace_1", "schema_trace_3", "spec_trace_2", "spec_trace_5"]
diff --git a/test/cases/stream/data/testdata/sw_schema_order.json b/test/cases/stream/data/testdata/sw_schema_order.json
new file mode 100644
index 00000000..a99e947b
--- /dev/null
+++ b/test/cases/stream/data/testdata/sw_schema_order.json
@@ -0,0 +1,45 @@
+[
+  {
+    "tags": [
+      { "str": { "value": "schema_trace_1" } },
+      { "int": { "value": 1 } },
+      { "str": { "value": "webapp_id" } },
+      { "str": { "value": "10.0.0.1_id" } },
+      { "str": { "value": "/home_id" } },
+      { "int": { "value": 1100 } },
+      { "int": { "value": 1622933202000000000 } },
+      { "str": { "value": "GET" } },
+      { "str": { "value": "200" } },
+      { "str": { "value": "schema_span_1" } }
+    ]
+  },
+  {
+    "tags": [
+      { "str": { "value": "schema_trace_2" } },
+      { "int": { "value": 0 } },
+      { "str": { "value": "webapp_id" } },
+      { "str": { "value": "10.0.0.2_id" } },
+      { "str": { "value": "/product_id" } },
+      { "int": { "value": 600 } },
+      { "int": { "value": 1622933202000000000 } },
+      { "str": { "value": "POST" } },
+      { "str": { "value": "201" } },
+      { "str": { "value": "schema_span_2" } }
+    ]
+  },
+  {
+    "tags": [
+      { "str": { "value": "schema_trace_3" } },
+      { "int": { "value": 1 } },
+      { "str": { "value": "webapp_id" } },
+      { "str": { "value": "10.0.0.3_id" } },
+      { "str": { "value": "/item_id" } },
+      { "int": { "value": 900 } },
+      { "int": { "value": 1622933202000000000 } },
+      { "str": { "value": "PUT" } },
+      { "str": { "value": "202" } },
+      { "str": { "value": "schema_span_3" } }
+    ]
+  }
+]
+
diff --git a/test/cases/stream/data/want/write_mixed.yaml b/test/cases/stream/data/want/write_mixed.yaml
new file mode 100644
index 00000000..85e7bff8
--- /dev/null
+++ b/test/cases/stream/data/want/write_mixed.yaml
@@ -0,0 +1,110 @@
+# Licensed to Apache Software Foundation (ASF) under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Apache Software Foundation (ASF) licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+elements:
+  - tagFamilies:
+    - name: searchable
+      tags:
+      - key: trace_id
+        value:
+          str:
+            value: schema_trace_1
+      - key: state
+        value:
+          int:
+            value: "1"
+      - key: duration
+        value:
+          int:
+            value: "1100"
+      - key: status_code
+        value:
+          str:
+            value: "200"
+      - key: span_id
+        value:
+          str:
+            value: schema_span_1
+  - tagFamilies:
+    - name: searchable
+      tags:
+      - key: trace_id
+        value:
+          str:
+            value: schema_trace_3
+      - key: state
+        value:
+          int:
+            value: "1"
+      - key: duration
+        value:
+          int:
+            value: "900"
+      - key: status_code
+        value:
+          str:
+            value: "202"
+      - key: span_id
+        value:
+          str:
+            value: schema_span_3
+  - tagFamilies:
+    - name: searchable
+      tags:
+      - key: trace_id
+        value:
+          str:
+            value: spec_trace_2
+      - key: state
+        value:
+          int:
+            value: "1"
+      - key: duration
+        value:
+          int:
+            value: "200"
+      - key: status_code
+        value:
+          str:
+            value: "201"
+      - key: span_id
+        value:
+          str:
+            value: spec_span_2
+  - tagFamilies:
+    - name: searchable
+      tags:
+      - key: trace_id
+        value:
+          str:
+            value: spec_trace_5
+      - key: state
+        value:
+          int:
+            value: "0"
+      - key: duration
+        value:
+          int:
+            value: "500"
+      - key: status_code
+        value:
+          str:
+            value: "200"
+      - key: span_id
+        value:
+          str:
+            value: spec_span_5
diff --git a/test/cases/stream/data/want/write_spec.yaml b/test/cases/stream/data/want/write_spec.yaml
deleted file mode 100644
index a40da633..00000000
--- a/test/cases/stream/data/want/write_spec.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to Apache Software Foundation (ASF) under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Apache Software Foundation (ASF) licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-elements:
-  - tagFamilies:
-      - name: searchable
-        tags:
-          - key: trace_id
-            value:
-              str:
-                value: spec_trace_2
-          - key: state
-            value:
-              int:
-                value: "1"
-          - key: duration
-            value:
-              int:
-                value: "200"
-          - key: status_code
-            value:
-              str:
-                value: "201"
-          - key: span_id
-            value:
-              str:
-                value: spec_span_2
-  - tagFamilies:
-      - name: searchable
-        tags:
-          - key: trace_id
-            value:
-              str:
-                value: spec_trace_5
-          - key: state
-            value:
-              int:
-                value: "0"
-          - key: duration
-            value:
-              int:
-                value: "500"
-          - key: status_code
-            value:
-              str:
-                value: "200"
-          - key: span_id
-            value:
-              str:
-                value: spec_span_5
-
diff --git a/test/cases/stream/stream.go b/test/cases/stream/stream.go
index 9936ef50..ad1f11b7 100644
--- a/test/cases/stream/stream.go
+++ b/test/cases/stream/stream.go
@@ -93,5 +93,5 @@ var _ = g.DescribeTable("Scanning Streams", func(args helpers.Args) {
        g.Entry("err in arr", helpers.Args{Input: "err_in_arr", Duration: 1 * time.Hour, WantErr: true}),
        g.Entry("filter by non-existent tag", helpers.Args{Input: "filter_non_existent_tag", Duration: 1 * time.Hour, WantErr: true}),
        g.Entry("project non-existent tag", helpers.Args{Input: "project_non_existent_tag", Duration: 1 * time.Hour, WantErr: true}),
-       g.Entry("write spec", helpers.Args{Input: "write_spec", Duration: 1 * time.Hour, IgnoreElementID: true}),
+       g.Entry("write mixed", helpers.Args{Input: "write_mixed", Duration: 1 * time.Hour, IgnoreElementID: true}),
 )
diff --git a/test/cases/trace/data/data.go b/test/cases/trace/data/data.go
index 0ace3f05..0c024ad5 100644
--- a/test/cases/trace/data/data.go
+++ b/test/cases/trace/data/data.go
@@ -249,37 +249,30 @@ func verifyQLWithRequest(innerGm gm.Gomega, args helpers.Args, yamlQuery *tracev
        innerGm.Expect(ok).To(gm.BeTrue())
 }
 
-func loadData(stream tracev1.TraceService_WriteClient, metadata *commonv1.Metadata, dataFile string, baseTime time.Time, interval time.Duration) {
+func loadData(stream tracev1.TraceService_WriteClient, metadata *commonv1.Metadata, dataFile string, baseTime time.Time, interval time.Duration, version *uint64) {
        var templates []interface{}
        content, err := dataFS.ReadFile("testdata/" + dataFile)
        gm.Expect(err).ShouldNot(gm.HaveOccurred())
        gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
+       gm.Expect(version).NotTo(gm.BeNil(), "version must be provided")
 
-       for i, template := range templates {
-               // Extract span data from template
+       for _, template := range templates {
                templateMap, ok := template.(map[string]interface{})
                gm.Expect(ok).To(gm.BeTrue())
-
-               // Get span data
                spanData, ok := templateMap["span"].(string)
                gm.Expect(ok).To(gm.BeTrue())
-
-               // Get tags data
                tagsData, ok := templateMap["tags"].([]interface{})
                gm.Expect(ok).To(gm.BeTrue())
 
-               // Convert tags to TagValue format
                var tagValues []*modelv1.TagValue
                for _, tag := range tagsData {
-                       tagBytes, err := json.Marshal(tag)
-                       gm.Expect(err).ShouldNot(gm.HaveOccurred())
+                       tagBytes, marshalErr := json.Marshal(tag)
+                       gm.Expect(marshalErr).ShouldNot(gm.HaveOccurred())
                        tagValue := &modelv1.TagValue{}
                        gm.Expect(protojson.Unmarshal(tagBytes, tagValue)).ShouldNot(gm.HaveOccurred())
                        tagValues = append(tagValues, tagValue)
                }
-
-               // Add timestamp tag as the last tag
-               timestamp := baseTime.Add(interval * time.Duration(i))
+               timestamp := baseTime.Add(interval * time.Duration(*version))
                timestampTag := &modelv1.TagValue{
                        Value: &modelv1.TagValue_Timestamp{
                                Timestamp: timestamppb.New(timestamp),
@@ -291,12 +284,54 @@ func loadData(stream tracev1.TraceService_WriteClient, metadata *commonv1.Metada
                        Metadata: metadata,
                        Tags:     tagValues,
                        Span:     []byte(spanData),
-                       Version:  uint64(i + 1),
+                       Version:  *version,
                })
+               *version++
                gm.Expect(errInner).ShouldNot(gm.HaveOccurred())
        }
 }
 
+func loadDataWithSpec(stream tracev1.TraceService_WriteClient, md *commonv1.Metadata, dataFile string,
+       baseTime time.Time, interval time.Duration, spec *tracev1.TagSpec, version *uint64,
+) {
+       var templates []interface{}
+       content, err := dataFS.ReadFile("testdata/" + dataFile)
+       gm.Expect(err).ShouldNot(gm.HaveOccurred())
+       gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
+       gm.Expect(version).NotTo(gm.BeNil(), "version must be provided")
+
+       isFirst := true
+       for i, template := range templates {
+               templateMap, ok := template.(map[string]interface{})
+               gm.Expect(ok).To(gm.BeTrue())
+               spanData, ok := templateMap["span"].(string)
+               gm.Expect(ok).To(gm.BeTrue())
+               tagsData, ok := templateMap["tags"].([]interface{})
+               gm.Expect(ok).To(gm.BeTrue())
+
+               var tagValues []*modelv1.TagValue
+               for _, tag := range tagsData {
+                       tagBytes, marshalErr := json.Marshal(tag)
+                       gm.Expect(marshalErr).ShouldNot(gm.HaveOccurred())
+                       tagValue := &modelv1.TagValue{}
+                       gm.Expect(protojson.Unmarshal(tagBytes, tagValue)).ShouldNot(gm.HaveOccurred())
+                       tagValues = append(tagValues, tagValue)
+               }
+               tagValues = append(tagValues, &modelv1.TagValue{
+                       Value: &modelv1.TagValue_Timestamp{Timestamp: timestamppb.New(baseTime.Add(time.Duration(i) * interval))},
+               })
+
+               req := &tracev1.WriteRequest{Tags: tagValues, Span: []byte(spanData), Version: *version}
+               *version++
+               if isFirst {
+                       req.Metadata = md
+                       req.TagSpec = spec
+                       isFirst = false
+               }
+               gm.Expect(stream.Send(req)).Should(gm.Succeed())
+       }
+}
+
 // Write writes trace data to the database.
 func Write(conn *grpclib.ClientConn, name string, baseTime time.Time, interval time.Duration) {
        WriteToGroup(conn, name, "test-trace-group", name, baseTime, interval)
@@ -319,7 +354,8 @@ func WriteToGroup(conn *grpclib.ClientConn, name, group, fileName string, baseTi
        ctx := context.Background()
        writeClient, err := c.Write(ctx)
        gm.Expect(err).NotTo(gm.HaveOccurred())
-       loadData(writeClient, metadata, fmt.Sprintf("%s.json", fileName), baseTime, interval)
+       version := uint64(1)
+       loadData(writeClient, metadata, fmt.Sprintf("%s.json", fileName), baseTime, interval, &version)
        gm.Expect(writeClient.CloseSend()).To(gm.Succeed())
        gm.Eventually(func() error {
                _, err := writeClient.Recv()
@@ -327,91 +363,53 @@ func WriteToGroup(conn *grpclib.ClientConn, name, group, fileName string, baseTi
        }, flags.EventuallyTimeout).Should(gm.Equal(io.EOF))
 }
 
-// SpecWithData pairs a TagSpec with a data file.
-type SpecWithData struct {
+// WriteSpec defines the specification for writing trace data.
+type WriteSpec struct {
+       Metadata *commonv1.Metadata
        Spec     *tracev1.TagSpec
        DataFile string
 }
 
-// WriteWithSpec writes trace data using tag_spec to specify tag names.
-func WriteWithSpec(conn *grpclib.ClientConn, name, group string,
-       baseTime time.Time, interval time.Duration, specDataPairs ...SpecWithData,
-) {
+// WriteMixed writes trace data in schema order first, and then in spec order.
+func WriteMixed(conn *grpclib.ClientConn, baseTime time.Time, interval time.Duration, writeSpecs ...WriteSpec) {
        ctx := context.Background()
-       md := &commonv1.Metadata{
-               Name:  name,
-               Group: group,
-       }
-
        schemaClient := databasev1.NewTraceRegistryServiceClient(conn)
-       resp, err := schemaClient.Get(ctx, &databasev1.TraceRegistryServiceGetRequest{Metadata: md})
-       gm.Expect(err).NotTo(gm.HaveOccurred())
-       md = resp.GetTrace().GetMetadata()
-
        c := tracev1.NewTraceServiceClient(conn)
        writeClient, err := c.Write(ctx)
        gm.Expect(err).NotTo(gm.HaveOccurred())
 
-       isFirstRequest := true
+       var currentMd *commonv1.Metadata
        version := uint64(1)
        currentTime := baseTime
-       for _, pair := range specDataPairs {
+       for idx, ws := range writeSpecs {
+               if ws.Metadata != nil {
+                       resp, getErr := schemaClient.Get(ctx, &databasev1.TraceRegistryServiceGetRequest{Metadata: ws.Metadata})
+                       gm.Expect(getErr).NotTo(gm.HaveOccurred())
+                       currentMd = resp.GetTrace().GetMetadata()
+               }
+               gm.Expect(currentMd).NotTo(gm.BeNil(), "first WriteSpec must have Metadata")
+
                var templates []interface{}
-               content, err := dataFS.ReadFile("testdata/" + pair.DataFile)
-               gm.Expect(err).ShouldNot(gm.HaveOccurred())
+               content, readErr := dataFS.ReadFile("testdata/" + ws.DataFile)
+               gm.Expect(readErr).ShouldNot(gm.HaveOccurred())
                gm.Expect(json.Unmarshal(content, &templates)).ShouldNot(gm.HaveOccurred())
 
-               isFirstForSpec := true
-               for i, template := range templates {
-                       templateMap, ok := template.(map[string]interface{})
-                       gm.Expect(ok).To(gm.BeTrue())
-
-                       spanData, ok := templateMap["span"].(string)
-                       gm.Expect(ok).To(gm.BeTrue())
-
-                       tagsData, ok := templateMap["tags"].([]interface{})
-                       gm.Expect(ok).To(gm.BeTrue())
-
-                       var tagValues []*modelv1.TagValue
-                       for _, tag := range tagsData {
-                               tagBytes, err := json.Marshal(tag)
-                               gm.Expect(err).ShouldNot(gm.HaveOccurred())
-                               tagValue := &modelv1.TagValue{}
-                               gm.Expect(protojson.Unmarshal(tagBytes, tagValue)).ShouldNot(gm.HaveOccurred())
-                               tagValues = append(tagValues, tagValue)
-                       }
-
-                       timestamp := currentTime.Add(time.Duration(i) * interval)
-                       timestampTag := &modelv1.TagValue{
-                               Value: &modelv1.TagValue_Timestamp{
-                                       Timestamp: timestamppb.New(timestamp),
-                               },
+               if idx == 0 {
+                       loadData(writeClient, currentMd, ws.DataFile, currentTime, interval, &version)
+               } else {
+                       var mdToSend *commonv1.Metadata
+                       if ws.Metadata != nil {
+                               mdToSend = currentMd
                        }
-                       tagValues = append(tagValues, timestampTag)
-
-                       req := &tracev1.WriteRequest{
-                               Tags:    tagValues,
-                               Span:    []byte(spanData),
-                               Version: version,
-                       }
-                       if isFirstRequest {
-                               req.Metadata = md
-                               isFirstRequest = false
-                       }
-                       if isFirstForSpec {
-                               req.TagSpec = pair.Spec
-                               isFirstForSpec = false
-                       }
-                       gm.Expect(writeClient.Send(req)).Should(gm.Succeed())
-                       version++
+                       loadDataWithSpec(writeClient, mdToSend, ws.DataFile, currentTime, interval, ws.Spec, &version)
                }
                currentTime = currentTime.Add(time.Duration(len(templates)) * interval)
        }
 
        gm.Expect(writeClient.CloseSend()).To(gm.Succeed())
        gm.Eventually(func() error {
-               _, err := writeClient.Recv()
-               return err
+               _, recvErr := writeClient.Recv()
+               return recvErr
        }, flags.EventuallyTimeout).Should(gm.Equal(io.EOF))
 }
 
diff --git a/test/cases/trace/data/input/write_spec.ql b/test/cases/trace/data/input/write_mixed.ql
similarity index 89%
rename from test/cases/trace/data/input/write_spec.ql
rename to test/cases/trace/data/input/write_mixed.ql
index 3f9ee96e..25d35f33 100644
--- a/test/cases/trace/data/input/write_spec.ql
+++ b/test/cases/trace/data/input/write_mixed.ql
@@ -16,6 +16,6 @@
 # under the License.
 
 
-SELECT trace_id, state, duration, span_id FROM TRACE sw IN test-trace-spec
+SELECT trace_id, state, duration, span_id FROM TRACE sw IN test-trace-spec, test-trace-spec2
 TIME > '-15m'
-WHERE trace_id IN ('spec_trace_001', 'spec_trace_003')
+WHERE trace_id IN ('schema_trace_001', 'spec_trace_001', 'spec_trace_003')
diff --git a/test/cases/trace/data/input/write_spec.yml b/test/cases/trace/data/input/write_mixed.yml
similarity index 89%
rename from test/cases/trace/data/input/write_spec.yml
rename to test/cases/trace/data/input/write_mixed.yml
index 3ca0807d..c2863a67 100644
--- a/test/cases/trace/data/input/write_spec.yml
+++ b/test/cases/trace/data/input/write_mixed.yml
@@ -16,7 +16,7 @@
 # under the License.
 
 name: "sw"
-groups: ["test-trace-spec"]
+groups: ["test-trace-spec", "test-trace-spec2"]
 tag_projection: ["trace_id", "state", "duration", "span_id"]
 criteria:
   condition:
@@ -24,5 +24,4 @@ criteria:
     op: "BINARY_OP_IN"
     value:
       str_array:
-        value: ["spec_trace_001", "spec_trace_003"]
-
+        value: ["schema_trace_001", "spec_trace_001", "spec_trace_003"]
diff --git a/test/cases/trace/data/testdata/sw_schema_order.json b/test/cases/trace/data/testdata/sw_schema_order.json
new file mode 100644
index 00000000..b71d0db2
--- /dev/null
+++ b/test/cases/trace/data/testdata/sw_schema_order.json
@@ -0,0 +1,123 @@
+[
+  {
+    "tags": [
+      {
+        "str": {
+          "value": "schema_trace_001"
+        }
+      },
+      {
+        "int": {
+          "value": 1
+        }
+      },
+      {
+        "str": {
+          "value": "webapp_service"
+        }
+      },
+      {
+        "str": {
+          "value": "webapp_instance_1"
+        }
+      },
+      {
+        "str": {
+          "value": "/home_endpoint"
+        }
+      },
+      {
+        "int": {
+          "value": 1200
+        }
+      },
+      {
+        "str": {
+          "value": "schema_span_001_1"
+        }
+      }
+    ],
+    "span": "schema_trace_001_span_1"
+  },
+  {
+    "tags": [
+      {
+        "str": {
+          "value": "schema_trace_001"
+        }
+      },
+      {
+        "int": {
+          "value": 0
+        }
+      },
+      {
+        "str": {
+          "value": "webapp_service"
+        }
+      },
+      {
+        "str": {
+          "value": "webapp_instance_2"
+        }
+      },
+      {
+        "str": {
+          "value": "/product_endpoint"
+        }
+      },
+      {
+        "int": {
+          "value": 800
+        }
+      },
+      {
+        "str": {
+          "value": "schema_span_001_2"
+        }
+      }
+    ],
+    "span": "schema_trace_001_span_2"
+  },
+  {
+    "tags": [
+      {
+        "str": {
+          "value": "schema_trace_002"
+        }
+      },
+      {
+        "int": {
+          "value": 1
+        }
+      },
+      {
+        "str": {
+          "value": "api_service"
+        }
+      },
+      {
+        "str": {
+          "value": "api_instance_1"
+        }
+      },
+      {
+        "str": {
+          "value": "/api_endpoint"
+        }
+      },
+      {
+        "int": {
+          "value": 650
+        }
+      },
+      {
+        "str": {
+          "value": "schema_span_002_1"
+        }
+      }
+    ],
+    "span": "schema_trace_002_span_1"
+  }
+]
+
diff --git a/test/cases/trace/data/want/write_mixed.yml b/test/cases/trace/data/want/write_mixed.yml
new file mode 100644
index 00000000..f0bb22c4
--- /dev/null
+++ b/test/cases/trace/data/want/write_mixed.yml
@@ -0,0 +1,134 @@
+# Licensed to Apache Software Foundation (ASF) under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Apache Software Foundation (ASF) licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+traces:
+  - spans:
+    - span: schema_trace_001_span_1
+      spanId: schema_span_001_1
+      tags:
+      - key: state
+        value:
+          int:
+            value: "1"
+      - key: duration
+        value:
+          int:
+            value: "1200"
+      - key: trace_id
+        value:
+          str:
+            value: schema_trace_001
+      - key: span_id
+        value:
+          str:
+            value: schema_span_001_1
+    - span: schema_trace_001_span_2
+      spanId: schema_span_001_2
+      tags:
+      - key: state
+        value:
+          int: {}
+      - key: duration
+        value:
+          int:
+            value: "800"
+      - key: trace_id
+        value:
+          str:
+            value: schema_trace_001
+      - key: span_id
+        value:
+          str:
+            value: schema_span_001_2
+    traceId: schema_trace_001
+  - spans:
+    - span: spec_trace_001_span_1
+      spanId: spec_span_001_1
+      tags:
+      - key: state
+        value:
+          int:
+            value: "1"
+      - key: duration
+        value:
+          int:
+            value: "1100"
+      - key: trace_id
+        value:
+          str:
+            value: spec_trace_001
+      - key: span_id
+        value:
+          str:
+            value: spec_span_001_1
+    - span: spec_trace_001_span_2
+      spanId: spec_span_001_2
+      tags:
+      - key: state
+        value:
+          int: {}
+      - key: duration
+        value:
+          int:
+            value: "600"
+      - key: trace_id
+        value:
+          str:
+            value: spec_trace_001
+      - key: span_id
+        value:
+          str:
+            value: spec_span_001_2
+    traceId: spec_trace_001
+  - spans:
+    - span: spec_trace_003_span_1
+      spanId: spec_span_003_1
+      tags:
+      - key: state
+        value:
+          int: {}
+      - key: duration
+        value:
+          int:
+            value: "750"
+      - key: trace_id
+        value:
+          str:
+            value: spec_trace_003
+      - key: span_id
+        value:
+          str:
+            value: spec_span_003_1
+    - span: spec_trace_003_span_2
+      spanId: spec_span_003_2
+      tags:
+      - key: state
+        value:
+          int: {}
+      - key: duration
+        value:
+          int:
+            value: "450"
+      - key: trace_id
+        value:
+          str:
+            value: spec_trace_003
+      - key: span_id
+        value:
+          str:
+            value: spec_span_003_2
+    traceId: spec_trace_003
diff --git a/test/cases/trace/data/want/write_spec.yml b/test/cases/trace/data/want/write_spec.yml
deleted file mode 100644
index b0d6305c..00000000
--- a/test/cases/trace/data/want/write_spec.yml
+++ /dev/null
@@ -1,96 +0,0 @@
-# Licensed to Apache Software Foundation (ASF) under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Apache Software Foundation (ASF) licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-traces:
-  - spans:
-      - span: spec_trace_001_span_1
-        spanId: spec_span_001_1
-        tags:
-          - key: state
-            value:
-              int:
-                value: "1"
-          - key: duration
-            value:
-              int:
-                value: "1100"
-          - key: trace_id
-            value:
-              str:
-                value: spec_trace_001
-          - key: span_id
-            value:
-              str:
-                value: spec_span_001_1
-      - span: spec_trace_001_span_2
-        spanId: spec_span_001_2
-        tags:
-          - key: state
-            value:
-              int: {}
-          - key: duration
-            value:
-              int:
-                value: "600"
-          - key: trace_id
-            value:
-              str:
-                value: spec_trace_001
-          - key: span_id
-            value:
-              str:
-                value: spec_span_001_2
-    traceId: spec_trace_001
-  - spans:
-      - span: spec_trace_003_span_1
-        spanId: spec_span_003_1
-        tags:
-          - key: state
-            value:
-              int: {}
-          - key: duration
-            value:
-              int:
-                value: "750"
-          - key: trace_id
-            value:
-              str:
-                value: spec_trace_003
-          - key: span_id
-            value:
-              str:
-                value: spec_span_003_1
-      - span: spec_trace_003_span_2
-        spanId: spec_span_003_2
-        tags:
-          - key: state
-            value:
-              int: {}
-          - key: duration
-            value:
-              int:
-                value: "450"
-          - key: trace_id
-            value:
-              str:
-                value: spec_trace_003
-          - key: span_id
-            value:
-              str:
-                value: spec_span_003_2
-    traceId: spec_trace_003
-
diff --git a/test/cases/trace/trace.go b/test/cases/trace/trace.go
index e99f25a8..5a51c5c3 100644
--- a/test/cases/trace/trace.go
+++ b/test/cases/trace/trace.go
@@ -61,5 +61,5 @@ var _ = g.DescribeTable("Scanning Traces", func(args helpers.Args) {
        g.Entry("multi-groups: sort by duration", helpers.Args{Input: "multi_group_sort_duration", Duration: 1 * time.Hour}),
        g.Entry("filter by non-existent tag", helpers.Args{Input: "filter_non_existent_tag", Duration: 1 * time.Hour, WantErr: true}),
        g.Entry("project non-existent tag", helpers.Args{Input: "project_non_existent_tag", Duration: 1 * time.Hour, WantErr: true}),
-       g.Entry("write spec", helpers.Args{Input: "write_spec", Duration: 1 * time.Hour, DisOrder: true}),
+       g.Entry("write mixed", helpers.Args{Input: "write_mixed", Duration: 1 * time.Hour, DisOrder: true}),
 )
