This is an automated email from the ASF dual-hosted git repository.
chaokunyang pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/fory.git
The following commit(s) were added to refs/heads/main by this push:
new 5c3fa4985 feat(go): metashare mode support collection and map and
nested object (#2643)
5c3fa4985 is described below
commit 5c3fa498532982783c650b820778c85fdcf5c39d
Author: Zhong Junjie <[email protected]>
AuthorDate: Tue Sep 23 11:21:14 2025 +0800
feat(go): metashare mode support collection and map and nested object
(#2643)
<!--
**Thanks for contributing to Apache Fory™.**
**If this is your first time opening a PR on fory, you can refer to
[CONTRIBUTING.md](https://github.com/apache/fory/blob/main/CONTRIBUTING.md).**
Contribution Checklist
- The **Apache Fory™** community has requirements on the naming of pr
titles. You can also find instructions in
[CONTRIBUTING.md](https://github.com/apache/fory/blob/main/CONTRIBUTING.md).
- Apache Fory™ has a strong focus on performance. If the PR you submit
will have an impact on performance, please benchmark it first and
provide the benchmark result here.
-->
## Why?
<!-- Describe the purpose of this PR. -->
## What does this PR do?
<!-- Describe the details of this PR. -->
* Extended type-definition support so collection and map field metadata
now carries element type information. Corresponding encoder/decoder
logic and tests were refreshed to cover collection/map scenarios
(fory/type_def.go, fory/type_def_encoder.go,
fory/type_def_encoder_test.go).
* Prevents mismatched assignments when schema evolution changes element
or value types (fory/struct.go).
* The meta-share test file was reorganized into a table-driven harness
that exercises type inconsistencies across slices, maps, and now nested
structs (fory/fory_metashare_test.go).
Things remaining to do:
- [ ] more tests to discover potential issues (like pointer types)
- [ ] xlang tests with Python
## Related issues
#2192
## Does this PR introduce any user-facing change?
<!--
If any user-facing interface changes, please [open an
issue](https://github.com/apache/fory/issues/new/choose) describing the
need to do so and update the document if necessary.
Delete section if not applicable.
-->
- [x] Does this PR introduce any public API change? no
- [x] Does this PR introduce any binary protocol compatibility change?
no
## Benchmark
<!--
When the PR has an impact on performance (if you don't know whether the
PR will have an impact on performance, you can submit the PR first, and
if it will have impact on performance, the code reviewer will explain
it), be sure to attach a benchmark data here.
Delete section if not applicable.
-->
---
.gitignore | 1 +
go/fory/fory.go | 11 +-
go/fory/fory_metashare_test.go | 420 +++++++++++++++++++++++++++++----------
go/fory/struct.go | 103 ++++++++--
go/fory/type_def.go | 211 ++++++++++++++++----
go/fory/type_def_encoder.go | 4 +-
go/fory/type_def_encoder_test.go | 173 ++++++++++++----
7 files changed, 711 insertions(+), 212 deletions(-)
diff --git a/.gitignore b/.gitignore
index ca557d35e..682625a42 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,4 @@ MODULE.bazel.lock
.DS_Store
**/.DS_Store
.vscode/
+go/fory/.gocache
\ No newline at end of file
diff --git a/go/fory/fory.go b/go/fory/fory.go
index fc3b9eab3..abae4ef9b 100644
--- a/go/fory/fory.go
+++ b/go/fory/fory.go
@@ -245,7 +245,7 @@ func (f *Fory) Write(buffer *ByteBuffer, v interface{})
(err error) {
case byte: // uint8
f.WriteByte_(buffer, v)
default:
- err = f.WriteReferencable(buffer, reflect.ValueOf(v))
+ err = f.WriteReferencable_(buffer, reflect.ValueOf(v))
}
return
}
@@ -304,12 +304,13 @@ func (f *Fory) readLength(buffer *ByteBuffer) int {
return int(buffer.ReadVarInt32())
}
-func (f *Fory) WriteReferencable(buffer *ByteBuffer, value reflect.Value)
error {
+func (f *Fory) WriteReferencable_(buffer *ByteBuffer, value reflect.Value)
error {
metaOffset := buffer.writerIndex
if f.compatible {
buffer.WriteInt32(-1)
}
- if err := f.writeReferencableBySerializer(buffer, value, nil); err !=
nil {
+ err := f.WriteReferencable(buffer, value)
+ if err != nil {
return err
}
if f.compatible && f.metaContext != nil &&
len(f.metaContext.writingTypeDefs) > 0 {
@@ -319,6 +320,10 @@ func (f *Fory) WriteReferencable(buffer *ByteBuffer, value
reflect.Value) error
return nil
}
+func (f *Fory) WriteReferencable(buffer *ByteBuffer, value reflect.Value)
error {
+ return f.writeReferencableBySerializer(buffer, value, nil)
+}
+
func (f *Fory) writeReferencableBySerializer(buffer *ByteBuffer, value
reflect.Value, serializer Serializer) error {
if refWritten, err := f.refResolver.WriteRefOrNull(buffer, value); err
== nil && !refWritten {
// check ptr
diff --git a/go/fory/fory_metashare_test.go b/go/fory/fory_metashare_test.go
index cc71c2639..2e2dd16bd 100644
--- a/go/fory/fory_metashare_test.go
+++ b/go/fory/fory_metashare_test.go
@@ -18,6 +18,7 @@
package fory
import (
+ "reflect"
"testing"
"github.com/stretchr/testify/assert"
@@ -30,6 +31,11 @@ type SimpleDataClass struct {
Age int32
Active bool
}
+type InconsistentDataClass struct {
+ Name int32 // Different type
+ Age int32
+ Active bool
+}
type ExtendedDataClass struct {
Name string
Age int32
@@ -43,6 +49,48 @@ type ReducedDataClass struct {
// Missing 'active' field
}
+type SliceDataClass struct {
+ Name string
+ Items []string
+ Nums []int32
+}
+
+type MapDataClass struct {
+ Name string
+ Metadata map[string]string
+ Counters map[string]int32
+}
+
+type UnsortedStruct struct {
+ StringField string
+ FloatField float64
+ BoolField bool
+ IntField int32
+ ByteField byte
+}
+
+type InconsistentSliceDataClass struct {
+ Name string
+ Items []int32 // Different element type
+ Nums []int32
+}
+
+type InconsistentMapDataClass struct {
+ Name string
+ Metadata map[string]int32 // Different value type
+ Counters map[int32]int32 // Different key type
+}
+
+type NestedOuter struct {
+ Name string
+ Inner SimpleDataClass
+}
+
+type NestedOuterIncompatible struct {
+ Name string
+ Inner InconsistentDataClass
+}
+
func TestMetaShareEnabled(t *testing.T) {
fory := NewForyWithOptions(WithCompatible(true))
@@ -58,124 +106,280 @@ func TestMetaShareDisabled(t *testing.T) {
assert.Nil(t, fory.metaContext, "Expected metaContext to be nil when
compatible=false")
}
-func TestSimpleDataClassSerialization(t *testing.T) {
- fory := NewForyWithOptions(WithCompatible(true))
-
- // Register the struct
- err := fory.RegisterTagType("SimpleDataClass", SimpleDataClass{})
- assert.NoError(t, err, "Failed to register type")
-
- obj := SimpleDataClass{Name: "test", Age: 25, Active: true}
-
- // Serialize
- data, err := fory.Marshal(obj)
- assert.NoError(t, err, "Failed to marshal")
-
- // Deserialize
- var deserialized SimpleDataClass
- err = fory.Unmarshal(data, &deserialized)
- assert.NoError(t, err, "Failed to unmarshal")
-
- // Verify
- assert.Equal(t, obj.Name, deserialized.Name)
- assert.Equal(t, obj.Age, deserialized.Age)
- assert.Equal(t, obj.Active, deserialized.Active)
-}
-
-func TestFieldSortingOrder(t *testing.T) {
- fory := NewForyWithOptions(WithCompatible(true))
-
- // Create a struct with fields in non-optimal order (only implemented
types)
- // the final order should be: FloatField, IntField, BoolField,
ByteField, StringField
- type UnsortedStruct struct {
- StringField string
- FloatField float64
- BoolField bool
- IntField int32
- ByteField byte
+func TestCompatibleSerializationScenarios(t *testing.T) {
+ cases := []compatibilityCase{
+ {
+ name: "SimpleRoundTrip",
+ tag: "SimpleDataClass",
+ writeType: SimpleDataClass{},
+ readType: SimpleDataClass{},
+ input: SimpleDataClass{Name: "test", Age: 25,
Active: true},
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(SimpleDataClass)
+ out := output.(SimpleDataClass)
+ assert.Equal(t, in.Age, out.Age)
+ assert.Equal(t, in.Active, out.Active)
+ assert.Equal(t, in.Name, out.Name)
+ },
+ },
+ {
+ name: "InconsistentTypeFallsBackToZeroValue",
+ tag: "TestStruct",
+ writeType: SimpleDataClass{},
+ readType: InconsistentDataClass{},
+ input: SimpleDataClass{Name: "test", Age: 25,
Active: true},
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(SimpleDataClass)
+ out := output.(InconsistentDataClass)
+ assert.Zero(t, out.Name)
+ assert.Equal(t, in.Age, out.Age)
+ assert.Equal(t, in.Active, out.Active)
+ },
+ },
+ {
+ name: "FieldSorting",
+ tag: "UnsortedStruct",
+ writeType: UnsortedStruct{},
+ readType: UnsortedStruct{},
+ input: UnsortedStruct{
+ StringField: "test",
+ FloatField: 3.14,
+ BoolField: true,
+ IntField: 42,
+ ByteField: 255,
+ },
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(UnsortedStruct)
+ out := output.(UnsortedStruct)
+ assert.Equal(t, in.FloatField, out.FloatField)
+ assert.Equal(t, in.IntField, out.IntField)
+ assert.Equal(t, in.BoolField, out.BoolField)
+ assert.Equal(t, in.ByteField, out.ByteField)
+ assert.Equal(t, in.StringField, out.StringField)
+ },
+ },
+ {
+ name: "SchemaEvolutionAddField",
+ tag: "TestStructAdd",
+ writeType: SimpleDataClass{},
+ readType: ExtendedDataClass{},
+ input: SimpleDataClass{Name: "test", Age: 25,
Active: true},
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(SimpleDataClass)
+ out := output.(ExtendedDataClass)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Equal(t, in.Age, out.Age)
+ assert.Equal(t, in.Active, out.Active)
+ assert.Equal(t, "", out.Email)
+ },
+ },
+ {
+ name: "SchemaEvolutionRemoveField",
+ tag: "TestStructRemove",
+ writeType: SimpleDataClass{},
+ readType: ReducedDataClass{},
+ input: SimpleDataClass{Name: "test", Age: 25,
Active: true},
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(SimpleDataClass)
+ out := output.(ReducedDataClass)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Equal(t, in.Age, out.Age)
+ },
+ },
+ {
+ name: "SliceFields",
+ tag: "SliceDataClass",
+ writeType: SliceDataClass{},
+ readType: SliceDataClass{},
+ input: SliceDataClass{
+ Name: "test",
+ Items: []string{"item1", "item2", "item3"},
+ Nums: []int32{10, 20, 30, 40},
+ },
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(SliceDataClass)
+ out := output.(SliceDataClass)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Equal(t, in.Items, out.Items)
+ assert.Equal(t, in.Nums, out.Nums)
+ },
+ },
+ {
+ name: "InconsistentSliceElements",
+ tag: "SliceDataClass",
+ writeType: SliceDataClass{},
+ readType: InconsistentSliceDataClass{},
+ input: SliceDataClass{
+ Name: "test",
+ Items: []string{"item1", "item2"},
+ Nums: []int32{1, 2, 3},
+ },
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(SliceDataClass)
+ out := output.(InconsistentSliceDataClass)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Nil(t, out.Items)
+ assert.Equal(t, in.Nums, out.Nums)
+ },
+ },
+ {
+ name: "MapFields",
+ tag: "MapDataClass",
+ writeType: MapDataClass{},
+ readType: MapDataClass{},
+ input: MapDataClass{
+ Name: "test",
+ Metadata: map[string]string{
+ "version": "1.0",
+ "author": "test_user",
+ "env": "production",
+ },
+ Counters: map[string]int32{
+ "requests": 100,
+ "errors": 5,
+ "success": 95,
+ },
+ },
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(MapDataClass)
+ out := output.(MapDataClass)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Equal(t, len(in.Metadata),
len(out.Metadata))
+ assert.Equal(t, in.Metadata, out.Metadata)
+ assert.Equal(t, len(in.Counters),
len(out.Counters))
+ assert.Equal(t, in.Counters, out.Counters)
+ },
+ },
+ {
+ name: "InconsistentMapValues",
+ tag: "MapDataClass",
+ writeType: MapDataClass{},
+ readType: InconsistentMapDataClass{},
+ input: MapDataClass{
+ Name: "test",
+ Metadata: map[string]string{
+ "key1": "value1",
+ "key2": "value2",
+ },
+ Counters: map[string]int32{
+ "c1": 10,
+ "c2": 20,
+ },
+ },
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(MapDataClass)
+ out := output.(InconsistentMapDataClass)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Nil(t, out.Metadata)
+ assert.Nil(t, out.Counters)
+ },
+ },
+ {
+ name: "NestedStruct",
+ tag: "NestedOuter",
+ writeType: NestedOuter{},
+ readType: NestedOuter{},
+ input: NestedOuter{
+ Name: "outer",
+ Inner: SimpleDataClass{Name: "inner", Age: 18,
Active: true},
+ },
+ writerSetup: func(f *Fory) error {
+ if err := f.RegisterTagType("SimpleDataClass",
SimpleDataClass{}); err != nil {
+ return err
+ }
+ return nil
+ },
+ readerSetup: func(f *Fory) error {
+ if err := f.RegisterTagType("SimpleDataClass",
SimpleDataClass{}); err != nil {
+ return err
+ }
+ return nil
+ },
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(NestedOuter)
+ out := output.(NestedOuter)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Equal(t, in.Inner, out.Inner)
+ },
+ },
+ {
+ name: "NestedStructIncompatible",
+ tag: "NestedOuter",
+ writeType: NestedOuter{},
+ readType: NestedOuterIncompatible{},
+ input: NestedOuter{
+ Name: "outer",
+ Inner: SimpleDataClass{Name: "inner", Age: 18,
Active: true},
+ },
+ writerSetup: func(f *Fory) error {
+ if err := f.RegisterTagType("SimpleDataClass",
SimpleDataClass{}); err != nil {
+ return err
+ }
+ return nil
+ },
+ readerSetup: func(f *Fory) error {
+ if err := f.RegisterTagType("SimpleDataClass",
InconsistentDataClass{}); err != nil {
+ return err
+ }
+ return nil
+ },
+ assertFunc: func(t *testing.T, input interface{},
output interface{}) {
+ in := input.(NestedOuter)
+ out := output.(NestedOuterIncompatible)
+ assert.Equal(t, in.Name, out.Name)
+ assert.Zero(t, out.Inner.Name)
+ assert.Equal(t, in.Inner.Age, out.Inner.Age)
+ assert.Equal(t, in.Inner.Active,
out.Inner.Active)
+ },
+ },
}
- err := fory.RegisterTagType("UnsortedStruct", UnsortedStruct{})
- assert.NoError(t, err, "Failed to register type")
-
- obj := UnsortedStruct{
- StringField: "test",
- FloatField: 3.14,
- BoolField: true,
- IntField: 42,
- ByteField: 255,
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ runCompatibilityCase(t, tc)
+ })
}
-
- // Serialize
- data, err := fory.Marshal(obj)
- assert.NoError(t, err, "Failed to marshal")
-
- // Deserialize
- var deserialized UnsortedStruct
- err = fory.Unmarshal(data, &deserialized)
- assert.NoError(t, err, "Failed to unmarshal")
-
- // Verify all fields are correctly serialized/deserialized regardless
of order
- assert.Equal(t, obj.FloatField, deserialized.FloatField)
- assert.Equal(t, obj.IntField, deserialized.IntField)
- assert.Equal(t, obj.BoolField, deserialized.BoolField)
- assert.Equal(t, obj.ByteField, deserialized.ByteField)
- assert.Equal(t, obj.StringField, deserialized.StringField)
-
- t.Logf("Field sorting test passed - optimal order is applied during
field definition creation")
}
-func TestSchemaEvolutionAddField(t *testing.T) {
- // Test adding fields to existing struct using predefined types
-
- // Serialize with SimpleDataClass (3 fields)
- fory1 := NewForyWithOptions(WithCompatible(true))
- err := fory1.RegisterTagType("TestStruct", SimpleDataClass{})
- assert.NoError(t, err, "Failed to register SimpleDataClass")
-
- originalObj := SimpleDataClass{Name: "test", Age: 25, Active: true}
- data, err := fory1.Marshal(originalObj)
- assert.NoError(t, err, "Failed to marshal SimpleDataClass")
-
- // Deserialize with ExtendedDataClass (4 fields - adds Email field)
- fory2 := NewForyWithOptions(WithCompatible(true))
- err = fory2.RegisterTagType("TestStruct", ExtendedDataClass{})
- assert.NoError(t, err, "Failed to register ExtendedDataClass")
-
- var deserialized ExtendedDataClass
- err = fory2.Unmarshal(data, &deserialized)
- assert.NoError(t, err, "Failed to unmarshal to ExtendedDataClass")
-
- // Verify common fields and default value for new field
- assert.Equal(t, originalObj.Name, deserialized.Name)
- assert.Equal(t, originalObj.Age, deserialized.Age)
- assert.Equal(t, originalObj.Active, deserialized.Active)
- assert.Equal(t, "", deserialized.Email, "Expected Email to be its
default value (empty string)")
+type compatibilityCase struct {
+ name string
+ tag string
+ writeType interface{}
+ readType interface{}
+ input interface{}
+ assertFunc func(t *testing.T, input interface{}, output interface{})
+ writerSetup func(*Fory) error
+ readerSetup func(*Fory) error
}
-func TestSchemaEvolutionRemoveField(t *testing.T) {
- // Test removing fields from existing struct using predefined types
+func runCompatibilityCase(t *testing.T, tc compatibilityCase) {
+ t.Helper()
- // Serialize with SimpleDataClass (3 fields)
- fory1 := NewForyWithOptions(WithCompatible(true))
- err := fory1.RegisterTagType("TestStruct", SimpleDataClass{})
- assert.NoError(t, err, "Failed to register SimpleDataClass")
+ writer := NewForyWithOptions(WithCompatible(true))
+ if tc.writerSetup != nil {
+ err := tc.writerSetup(writer)
+ assert.NoError(t, err)
+ }
+ err := writer.RegisterTagType(tc.tag, tc.writeType)
+ assert.NoError(t, err)
- originalObj := SimpleDataClass{Name: "test", Age: 25, Active: true}
- data, err := fory1.Marshal(originalObj)
- assert.NoError(t, err, "Failed to marshal SimpleDataClass")
+ data, err := writer.Marshal(tc.input)
+ assert.NoError(t, err)
- // Deserialize with ReducedDataClass (2 fields - removes Active)
- fory2 := NewForyWithOptions(WithCompatible(true))
- err = fory2.RegisterTagType("TestStruct", ReducedDataClass{})
- assert.NoError(t, err, "Failed to register ReducedDataClass")
+ reader := NewForyWithOptions(WithCompatible(true))
+ if tc.readerSetup != nil {
+ err = tc.readerSetup(reader)
+ assert.NoError(t, err)
+ }
+ err = reader.RegisterTagType(tc.tag, tc.readType)
+ assert.NoError(t, err)
- var deserialized ReducedDataClass
- err = fory2.Unmarshal(data, &deserialized)
- assert.NoError(t, err, "Failed to unmarshal to ReducedDataClass")
+ target := reflect.New(reflect.TypeOf(tc.readType))
+ var unmarshalErr error
+ assert.NotPanics(t, func() {
+ unmarshalErr = reader.Unmarshal(data, target.Interface())
+ })
+ assert.NoError(t, unmarshalErr)
- // Verify common fields
- assert.Equal(t, originalObj.Name, deserialized.Name)
- assert.Equal(t, originalObj.Age, deserialized.Age)
- // Active field is removed, so it should not be present in deserialized
ReducedDataClass
+ tc.assertFunc(t, tc.input, target.Elem().Interface())
}
diff --git a/go/fory/struct.go b/go/fory/struct.go
index 2fd679a35..02b230279 100644
--- a/go/fory/struct.go
+++ b/go/fory/struct.go
@@ -160,19 +160,23 @@ func createStructFieldInfos(f *Fory, type_ reflect.Type)
(structFieldsInfo, erro
if field.Type.Kind() == reflect.Interface {
field.Type = reflect.ValueOf(field.Type).Elem().Type()
}
- fieldSerializer, _ :=
f.typeResolver.getSerializerByType(field.Type, true)
- if field.Type.Kind() == reflect.Array {
- // When a struct field is an array type,
- // retrieve its corresponding slice serializer and
populate it into fieldInfo for reuse.
- elemType := field.Type.Elem()
- sliceType := reflect.SliceOf(elemType)
- fieldSerializer =
f.typeResolver.typeToSerializers[sliceType]
- } else if field.Type.Kind() == reflect.Slice {
- // If the field is a concrete slice type, dynamically
create a valid serializer
- // so it has the potential and capability to use
readSameTypes function.
- if field.Type.Elem().Kind() != reflect.Interface {
- fieldSerializer = sliceSerializer{
-
f.typeResolver.typesInfo[field.Type.Elem()],
+ var fieldSerializer Serializer
+ if field.Type.Kind() != reflect.Struct {
+ var _ error
+ fieldSerializer, _ =
f.typeResolver.getSerializerByType(field.Type, true)
+ if field.Type.Kind() == reflect.Array {
+ // When a struct field is an array type,
+ // retrieve its corresponding slice serializer
and populate it into fieldInfo for reuse.
+ elemType := field.Type.Elem()
+ sliceType := reflect.SliceOf(elemType)
+ fieldSerializer =
f.typeResolver.typeToSerializers[sliceType]
+ } else if field.Type.Kind() == reflect.Slice {
+ // If the field is a concrete slice type,
dynamically create a valid serializer
+ // so it has the potential and capability to
use readSameTypes function.
+ if field.Type.Elem().Kind() !=
reflect.Interface {
+ fieldSerializer = sliceSerializer{
+
f.typeResolver.typesInfo[field.Type.Elem()],
+ }
}
}
}
@@ -241,19 +245,26 @@ func createStructFieldInfosFromFieldDefs(f *Fory,
fieldDefs []FieldDef, type_ re
for i, def := range fieldDefs {
current_field_names[def.name] = i
+ fieldTypeFromDef, err := resolveFieldDefType(f, def)
+ if err != nil {
+ return nil, err
+ }
+
fieldIndex := -1 // Default to -1 if field doesn't exist in
current struct
var fieldType reflect.Type
+ var structField reflect.StructField
if structFieldIndex, exists := fieldNameToIndex[def.name];
exists {
- fieldIndex = structFieldIndex
- fieldType = type_.Field(structFieldIndex).Type
- } else {
- // Field doesn't exist in current struct version, we
need the type from FieldDef
- if info, exists :=
f.typeResolver.typeIDToTypeInfo[int32(def.fieldType.TypeId())]; exists {
- fieldType = info.Type
+ structField = type_.Field(structFieldIndex)
+ fieldType = fieldTypeFromDef
+ if typesCompatible(structField.Type, fieldTypeFromDef) {
+ fieldIndex = structFieldIndex
+ fieldType = structField.Type
} else {
- return nil, fmt.Errorf("unknown type for field
%s with typeId %d", def.name, def.fieldType.TypeId())
+ fieldType = fieldTypeFromDef
}
+ } else {
+ fieldType = fieldTypeFromDef
}
fieldSerializer, err := def.fieldType.getSerializer(f)
@@ -263,6 +274,7 @@ func createStructFieldInfosFromFieldDefs(f *Fory, fieldDefs
[]FieldDef, type_ re
fieldInfo := &fieldInfo{
name: def.name,
+ field: structField,
fieldIndex: fieldIndex,
type_: fieldType,
referencable: def.nullable,
@@ -275,6 +287,57 @@ func createStructFieldInfosFromFieldDefs(f *Fory,
fieldDefs []FieldDef, type_ re
return fields, nil
}
+func resolveFieldDefType(f *Fory, def FieldDef) (reflect.Type, error) {
+ typeInfo, err := def.fieldType.getTypeInfo(f)
+ if err != nil {
+ return nil, fmt.Errorf("unknown type for field %s with typeId
%d: %w", def.name, def.fieldType.TypeId(), err)
+ }
+ if typeInfo.Type == nil {
+ return nil, fmt.Errorf("type information missing for field %s
with typeId %d", def.name, def.fieldType.TypeId())
+ }
+ return typeInfo.Type, nil
+}
+
+func typesCompatible(actual, expected reflect.Type) bool {
+ if actual == nil || expected == nil {
+ return false
+ }
+ if actual == expected {
+ return true
+ }
+ if actual.AssignableTo(expected) || expected.AssignableTo(actual) {
+ return true
+ }
+ if actual.Kind() == reflect.Ptr && actual.Elem() == expected {
+ return true
+ }
+ if expected.Kind() == reflect.Ptr && expected.Elem() == actual {
+ return true
+ }
+ if actual.Kind() == expected.Kind() {
+ switch actual.Kind() {
+ case reflect.Slice, reflect.Array:
+ return elementTypesCompatible(actual.Elem(),
expected.Elem())
+ case reflect.Map:
+ return elementTypesCompatible(actual.Key(),
expected.Key()) && elementTypesCompatible(actual.Elem(), expected.Elem())
+ }
+ }
+ return false
+}
+
+func elementTypesCompatible(actual, expected reflect.Type) bool {
+ if actual == nil || expected == nil {
+ return false
+ }
+ if actual == expected || actual.AssignableTo(expected) ||
expected.AssignableTo(actual) {
+ return true
+ }
+ if actual.Kind() == reflect.Ptr {
+ return elementTypesCompatible(actual, expected.Elem())
+ }
+ return false
+}
+
type triple struct {
typeID int16
serializer Serializer
diff --git a/go/fory/type_def.go b/go/fory/type_def.go
index 6d59bc05f..a7ab52f22 100644
--- a/go/fory/type_def.go
+++ b/go/fory/type_def.go
@@ -133,7 +133,7 @@ type FieldDef struct {
// buildFieldDefs extracts field definitions from a struct value
func buildFieldDefs(fory *Fory, value reflect.Value) ([]FieldDef, error) {
- var fieldInfos []FieldDef
+ var fieldDefs []FieldDef
type_ := value.Type()
for i := 0; i < type_.NumField(); i++ {
@@ -156,23 +156,23 @@ func buildFieldDefs(fory *Fory, value reflect.Value)
([]FieldDef, error) {
trackingRef: fory.refTracking,
fieldType: ft,
}
- fieldInfos = append(fieldInfos, fieldInfo)
+ fieldDefs = append(fieldDefs, fieldInfo)
}
// Sort field definitions
- if len(fieldInfos) > 1 {
+ if len(fieldDefs) > 1 {
// Extract serializers and names for sorting
- serializers := make([]Serializer, len(fieldInfos))
- fieldNames := make([]string, len(fieldInfos))
- for i, fieldInfo := range fieldInfos {
- serializer, err :=
fieldInfo.fieldType.getSerializer(fory)
+ serializers := make([]Serializer, len(fieldDefs))
+ fieldNames := make([]string, len(fieldDefs))
+ for i, fieldDef := range fieldDefs {
+ serializer, err :=
fieldDef.fieldType.getSerializer(fory)
if err != nil {
// If we can't get serializer, use nil (will be
handled by sortFields)
serializers[i] = nil
} else {
serializers[i] = serializer
}
- fieldNames[i] = fieldInfo.name
+ fieldNames[i] = fieldDef.name
}
// Use existing sortFields function to get optimal order
@@ -180,19 +180,19 @@ func buildFieldDefs(fory *Fory, value reflect.Value)
([]FieldDef, error) {
// Rebuild fieldInfos in the sorted order
nameToFieldInfo := make(map[string]FieldDef)
- for _, fieldInfo := range fieldInfos {
+ for _, fieldInfo := range fieldDefs {
nameToFieldInfo[fieldInfo.name] = fieldInfo
}
- sortedFieldInfos := make([]FieldDef, len(fieldInfos))
+ sortedFieldInfos := make([]FieldDef, len(fieldDefs))
for i, name := range sortedNames {
sortedFieldInfos[i] = nameToFieldInfo[name]
}
- fieldInfos = sortedFieldInfos
+ fieldDefs = sortedFieldInfos
}
- return fieldInfos, nil
+ return fieldDefs, nil
}
// FieldType interface represents different field types, including object,
collection, and map types
@@ -200,6 +200,7 @@ type FieldType interface {
TypeId() TypeId
write(*ByteBuffer)
getSerializer(*Fory) (Serializer, error)
+ getTypeInfo(*Fory) (TypeInfo, error) // some serializer need typeinfo
as well
}
// BaseFieldType provides common functionality for field types
@@ -212,36 +213,81 @@ func (b *BaseFieldType) write(buffer *ByteBuffer) {
buffer.WriteVarUint32Small7(uint32(b.typeId))
}
-func (o *BaseFieldType) getSerializer(fory *Fory) (Serializer, error) {
- if o.typeId == EXTENSION || o.typeId == STRUCT || o.typeId ==
NAMED_STRUCT ||
- o.typeId == COMPATIBLE_STRUCT || o.typeId ==
NAMED_COMPATIBLE_STRUCT || o.typeId == UNKNOWN_TYPE_ID {
- return nil, nil
- }
- info, err := fory.typeResolver.getTypeInfoById(o.typeId)
+func (b *BaseFieldType) getSerializer(fory *Fory) (Serializer, error) {
+ typeInfo, err := b.getTypeInfo(fory)
if err != nil {
return nil, err
}
- return info.Serializer, nil
+ return typeInfo.Serializer, nil
}
-// readFieldInfo reads field type info from the buffer according to the TypeId
+func (b *BaseFieldType) getTypeInfo(fory *Fory) (TypeInfo, error) {
+ info, err := fory.typeResolver.getTypeInfoById(b.typeId)
+ if err != nil {
+ return TypeInfo{}, err
+ }
+ return info, nil
+}
+
+// readFieldType reads field type info from the buffer according to the TypeId
func readFieldType(buffer *ByteBuffer) (FieldType, error) {
typeId := buffer.ReadVarUint32Small7()
- if typeId == LIST || typeId == SET {
- panic("not implement yet")
- } else if typeId == MAP {
- panic("not implement yet")
- }
- return NewObjectFieldType(TypeId(typeId)), nil
+ switch typeId {
+ case LIST, SET:
+ // Read element type recursively
+ elementType, err := readFieldType(buffer)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read element type:
%w", err)
+ }
+ return NewCollectionFieldType(TypeId(typeId), elementType), nil
+ case MAP:
+ // Read key type recursively
+ keyType, err := readFieldType(buffer)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read key type: %w",
err)
+ }
+ // Read value type recursively
+ valueType, err := readFieldType(buffer)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read value type: %w",
err)
+ }
+ return NewMapFieldType(TypeId(typeId), keyType, valueType), nil
+ case EXTENSION, STRUCT, NAMED_STRUCT, COMPATIBLE_STRUCT,
NAMED_COMPATIBLE_STRUCT:
+ return NewDynamicFieldType(TypeId(typeId)), nil
+ }
+ return NewSimpleFieldType(TypeId(typeId)), nil
}
-// CollectionFieldType represents collection types like List, Set
+// CollectionFieldType represents collection types like List, Slice
type CollectionFieldType struct {
BaseFieldType
elementType FieldType
}
+func NewCollectionFieldType(typeId TypeId, elementType FieldType)
*CollectionFieldType {
+ return &CollectionFieldType{
+ BaseFieldType: BaseFieldType{typeId: typeId},
+ elementType: elementType,
+ }
+}
+
+func (c *CollectionFieldType) write(buffer *ByteBuffer) {
+ c.BaseFieldType.write(buffer)
+ c.elementType.write(buffer)
+}
+
+func (c *CollectionFieldType) getTypeInfo(f *Fory) (TypeInfo, error) {
+ elemInfo, err := c.elementType.getTypeInfo(f)
+ elementType := elemInfo.Type
+ collectionType := reflect.SliceOf(elementType)
+ if err != nil {
+ return TypeInfo{}, err
+ }
+ sliceSerializer := &sliceSerializer{elemInfo: elemInfo}
+ return TypeInfo{Type: collectionType, Serializer: sliceSerializer}, nil
+}
+
// MapFieldType represents map types
type MapFieldType struct {
BaseFieldType
@@ -249,24 +295,111 @@ type MapFieldType struct {
valueType FieldType
}
-// ObjectFieldType represents object field types that aren't registered or
collection/map types
-type ObjectFieldType struct {
+func NewMapFieldType(typeId TypeId, keyType, valueType FieldType)
*MapFieldType {
+ return &MapFieldType{
+ BaseFieldType: BaseFieldType{typeId: typeId},
+ keyType: keyType,
+ valueType: valueType,
+ }
+}
+
+func (m *MapFieldType) write(buffer *ByteBuffer) {
+ m.BaseFieldType.write(buffer)
+ m.keyType.write(buffer)
+ m.valueType.write(buffer)
+}
+
+func (m *MapFieldType) getTypeInfo(f *Fory) (TypeInfo, error) {
+ keyInfo, err := m.keyType.getTypeInfo(f)
+ if err != nil {
+ return TypeInfo{}, err
+ }
+ valueInfo, err := m.valueType.getTypeInfo(f)
+ if err != nil {
+ return TypeInfo{}, err
+ }
+ var mapType reflect.Type
+ if keyInfo.Type != nil && valueInfo.Type != nil {
+ mapType = reflect.MapOf(keyInfo.Type, valueInfo.Type)
+ }
+ mapSerializer := &mapSerializer{
+ keySerializer: keyInfo.Serializer,
+ valueSerializer: valueInfo.Serializer,
+ }
+ return TypeInfo{Type: mapType, Serializer: mapSerializer}, nil
+}
+
+// SimpleFieldType represents object field types that aren't collection/map
types
+type SimpleFieldType struct {
BaseFieldType
}
-func NewObjectFieldType(typeId TypeId) *ObjectFieldType {
- return &ObjectFieldType{
+func NewSimpleFieldType(typeId TypeId) *SimpleFieldType {
+ return &SimpleFieldType{
BaseFieldType: BaseFieldType{
typeId: typeId,
},
}
}
-// todo: implement buildFieldType for collection and map types
-// buildFieldType builds field type from reflect.Type, handling collection,
map and object types
+// DynamicFieldType represents a field type that is determined at runtime,
like EXTENSION or STRUCT
+type DynamicFieldType struct {
+ BaseFieldType
+}
+
+func NewDynamicFieldType(typeId TypeId) *DynamicFieldType {
+ return &DynamicFieldType{
+ BaseFieldType: BaseFieldType{
+ typeId: typeId,
+ },
+ }
+}
+
+func (d *DynamicFieldType) getTypeInfo(fory *Fory) (TypeInfo, error) {
+ // leave empty for runtime resolution, we not know the actual type here
+ return TypeInfo{Type: reflect.TypeOf((*interface{})(nil)).Elem(),
Serializer: nil}, nil
+}
+
+// buildFieldType builds field type from reflect.Type, handling collection,
map recursively
func buildFieldType(fory *Fory, fieldValue reflect.Value) (FieldType, error) {
fieldType := fieldValue.Type()
+ // Handle slice and array types
+ if fieldType.Kind() == reflect.Slice || fieldType.Kind() ==
reflect.Array {
+ // Create a zero value of the element type for recursive
processing
+ elemType := fieldType.Elem()
+ elemValue := reflect.Zero(elemType)
+
+ elementFieldType, err := buildFieldType(fory, elemValue)
+ if err != nil {
+ return nil, fmt.Errorf("failed to build element field
type: %w", err)
+ }
+
+ return NewCollectionFieldType(LIST, elementFieldType), nil
+ }
+
+ // Handle map types
+ if fieldType.Kind() == reflect.Map {
+ // Create zero values for key and value types
+ keyType := fieldType.Key()
+ valueType := fieldType.Elem()
+ keyValue := reflect.Zero(keyType)
+ valueValue := reflect.Zero(valueType)
+
+ keyFieldType, err := buildFieldType(fory, keyValue)
+ if err != nil {
+ return nil, fmt.Errorf("failed to build key field type:
%w", err)
+ }
+
+ valueFieldType, err := buildFieldType(fory, valueValue)
+ if err != nil {
+ return nil, fmt.Errorf("failed to build value field
type: %w", err)
+ }
+
+ return NewMapFieldType(MAP, keyFieldType, valueFieldType), nil
+ }
+
+	// For all other types, get the type ID and build a SimpleFieldType or DynamicFieldType
var typeId TypeId
typeInfo, err := fory.typeResolver.getTypeInfo(fieldValue, true)
if err != nil {
@@ -274,14 +407,10 @@ func buildFieldType(fory *Fory, fieldValue reflect.Value)
(FieldType, error) {
}
typeId = TypeId(typeInfo.TypeID)
- if fieldType.Kind() == reflect.Slice || fieldType.Kind() ==
reflect.Array || fieldType.Kind() == SET {
- panic("not implement yet")
- }
-
- if fieldType.Kind() == reflect.Map {
- panic("not implement yet")
+ if typeId == EXTENSION || typeId == STRUCT || typeId == NAMED_STRUCT ||
+ typeId == COMPATIBLE_STRUCT || typeId ==
NAMED_COMPATIBLE_STRUCT {
+ return NewDynamicFieldType(typeId), nil
}
- // For all other types, treat as ObjectFieldType
- return NewObjectFieldType(typeId), nil
+ return NewSimpleFieldType(typeId), nil
}
diff --git a/go/fory/type_def_encoder.go b/go/fory/type_def_encoder.go
index 866452388..c6300773c 100644
--- a/go/fory/type_def_encoder.go
+++ b/go/fory/type_def_encoder.go
@@ -144,8 +144,8 @@ func writeMetaHeader(buffer *ByteBuffer, typeDef *TypeDef)
error {
// - first 1 byte: header (2 bits field name encoding + 4 bits size +
nullability flag + ref tracking flag)
// - next variable bytes: FieldType info
// - next variable bytes: field name or tag id
-func writeFieldDefs(typeResolver *typeResolver, buffer *ByteBuffer, fieldInfos
[]FieldDef) error {
- for _, field := range fieldInfos {
+func writeFieldDefs(typeResolver *typeResolver, buffer *ByteBuffer, fieldDefs
[]FieldDef) error {
+ for _, field := range fieldDefs {
if err := writeFieldDef(typeResolver, buffer, field); err !=
nil {
return fmt.Errorf("failed to write field def for field
%s: %w", field.name, err)
}
diff --git a/go/fory/type_def_encoder_test.go b/go/fory/type_def_encoder_test.go
index e65474d32..ec7c19219 100644
--- a/go/fory/type_def_encoder_test.go
+++ b/go/fory/type_def_encoder_test.go
@@ -30,54 +30,151 @@ type SimpleStruct struct {
Name string
}
-// TestTypeDefEncodingDecoding tests the encoding and decoding of TypeDef
-func TestTypeDefEncodingDecoding(t *testing.T) {
- // Create a Fory instance for testing
- fory := NewFory(false)
+type SliceStruct struct {
+ ID int32
+ Items []string
+}
- // Create a test struct instance
- testStruct := SimpleStruct{
- ID: 42,
- Name: "test",
- }
+type NestedSliceStruct struct {
+ ID int32
+ Matrix [][]int
+ Records [][]string
+}
- if err := fory.RegisterTagType("example.SimpleStruct", testStruct); err
!= nil {
- t.Fatalf("Failed to register tag type: %v", err)
- }
+type MapStruct struct {
+ ID int32
+ Data map[string]int
+}
- // Build TypeDef from the struct
- structValue := reflect.ValueOf(testStruct)
- originalTypeDef, err := buildTypeDef(fory, structValue)
- if err != nil {
- t.Fatalf("Failed to build TypeDef: %v", err)
+type ComplexStruct struct {
+ ID int32
+ SliceMap map[string][]int
+ MapSlice []map[string]int
+}
+
+// TestTypeDefEncodingDecoding tests encoding and decoding of
TypeDef
+// This ensures the peer can successfully encode and decode the same TypeDef,
and obtain an appropriate serializer to read or skip data
+func TestTypeDefEncodingDecoding(t *testing.T) {
+ tests := []struct {
+ name string
+ tagName string
+ testStruct interface{}
+ }{
+ {
+ name: "SimpleStruct with basic fields",
+ tagName: "example.SimpleStruct",
+ testStruct: SimpleStruct{
+ ID: 42,
+ Name: "test",
+ },
+ },
+ {
+ name: "SliceStruct with basic items",
+ tagName: "example.SliceStruct",
+ testStruct: SliceStruct{
+ ID: 100,
+ Items: []string{"item1", "item2", "item3"},
+ },
+ },
+ {
+ name: "NestedSliceStruct with nested collections",
+ tagName: "example.NestedSliceStruct",
+ testStruct: NestedSliceStruct{
+ ID: 200,
+ Matrix: [][]int{{1, 2}, {3, 4}},
+ Records: [][]string{{"a", "b"}, {"c", "d"}},
+ },
+ },
+ {
+ name: "MapStruct with map fields",
+ tagName: "example.MapStruct",
+ testStruct: MapStruct{
+ ID: 300,
+ Data: map[string]int{"key1": 1, "key2": 2},
+ },
+ },
+ {
+ name: "ComplexStruct with complex nested types",
+ tagName: "example.ComplexStruct",
+ testStruct: ComplexStruct{
+ ID: 400,
+ SliceMap: map[string][]int{"list1": {1, 2, 3},
"list2": {4, 5, 6}},
+ MapSlice: []map[string]int{{"a": 1}, {"b": 2}},
+ },
+ },
}
- // Create a buffer with the encoded data
- buffer := NewByteBuffer(make([]byte, 0, 256))
- originalTypeDef.writeTypeDef(buffer)
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ fory := NewFory(false)
- // Decode the TypeDef
- decodedTypeDef, err := readTypeDef(fory, buffer,
int64(buffer.ReadInt64()))
- if err != nil {
- t.Fatalf("Failed to decode TypeDef: %v", err)
+ if err := fory.RegisterTagType(tt.tagName,
tt.testStruct); err != nil {
+ t.Fatalf("Failed to register tag type: %v", err)
+ }
+
+ structValue := reflect.ValueOf(tt.testStruct)
+ originalTypeDef, err := buildTypeDef(fory, structValue)
+ if err != nil {
+ t.Fatalf("Failed to build TypeDef: %v", err)
+ }
+
+ buffer := NewByteBuffer(make([]byte, 0, 256))
+ originalTypeDef.writeTypeDef(buffer)
+
+ decodedTypeDef, err := readTypeDef(fory, buffer,
int64(buffer.ReadInt64()))
+ if err != nil {
+ t.Fatalf("Failed to decode TypeDef: %v", err)
+ }
+
+ // basic checks
+ assert.True(t, decodedTypeDef.typeId ==
originalTypeDef.typeId || decodedTypeDef.typeId == -originalTypeDef.typeId,
"TypeId mismatch")
+ assert.Equal(t, originalTypeDef.registerByName,
decodedTypeDef.registerByName, "RegisterByName mismatch")
+ assert.Equal(t, originalTypeDef.compressed,
decodedTypeDef.compressed, "Compressed flag mismatch")
+ assert.Equal(t, len(originalTypeDef.fieldDefs),
len(decodedTypeDef.fieldDefs), "Field count mismatch")
+
+ for i, originalField := range originalTypeDef.fieldDefs
{
+ checkFieldDef(t, originalField,
decodedTypeDef.fieldDefs[i])
+ }
+ })
}
+}
+
+func checkFieldDef(t *testing.T, original, decoded FieldDef) {
+ assert.Equal(t, original.name, decoded.name, "Field name mismatch")
+ assert.Equal(t, original.nameEncoding, decoded.nameEncoding, "Field
name encoding mismatch")
+ assert.Equal(t, original.nullable, decoded.nullable, "Field nullable
mismatch")
+ assert.Equal(t, original.trackingRef, decoded.trackingRef, "Field
trackingRef mismatch")
+ checkFieldTypeRecursively(t, original.fieldType, decoded.fieldType,
original.name)
+}
+
+func checkFieldTypeRecursively(t *testing.T, original, decoded FieldType, path
string) {
+ // Check TypeId
+ assert.Equal(t, original.TypeId(), decoded.TypeId(), "FieldType TypeId
mismatch at path: %s", path)
- // Verify typeId(ignore sign)
- assert.True(t, decodedTypeDef.typeId == originalTypeDef.typeId ||
decodedTypeDef.typeId == -originalTypeDef.typeId, "TypeId mismatch")
- assert.Equal(t, originalTypeDef.registerByName,
decodedTypeDef.registerByName, "RegisterByName mismatch")
- assert.Equal(t, originalTypeDef.compressed, decodedTypeDef.compressed,
"Compressed flag mismatch")
+ // Check type consistency based on the actual type
+ switch originalType := original.(type) {
+ case *SimpleFieldType:
+ _, ok := decoded.(*SimpleFieldType)
+ assert.True(t, ok, "Type mismatch at path %s: original is
SimpleFieldType but decoded is not", path)
- // Verify field count matches
- assert.Equal(t, len(originalTypeDef.fieldDefs),
len(decodedTypeDef.fieldDefs), "Field count mismatch")
+ case *CollectionFieldType:
+ decodedCollection, ok := decoded.(*CollectionFieldType)
+ assert.True(t, ok, "Type mismatch at path %s: original is
CollectionFieldType but decoded is not", path)
+ if ok {
+ // Recursively check element type
+ checkFieldTypeRecursively(t, originalType.elementType,
decodedCollection.elementType, path+"[]")
+ }
- // Verify field names match
- for i, originalField := range originalTypeDef.fieldDefs {
- decodedField := decodedTypeDef.fieldDefs[i]
+ case *MapFieldType:
+ decodedMap, ok := decoded.(*MapFieldType)
+ assert.True(t, ok, "Type mismatch at path %s: original is
MapFieldType but decoded is not", path)
+ if ok {
+ // Recursively check key and value types
+ checkFieldTypeRecursively(t, originalType.keyType,
decodedMap.keyType, path+"[key]")
+ checkFieldTypeRecursively(t, originalType.valueType,
decodedMap.valueType, path+"[value]")
+ }
- assert.Equal(t, originalField.name, decodedField.name, "Field
name mismatch at index %d", i)
- assert.Equal(t, originalField.nameEncoding,
decodedField.nameEncoding, "Field name encoding mismatch at index %d", i)
- assert.Equal(t, originalField.nullable, decodedField.nullable,
"Field nullable mismatch at index %d", i)
- assert.Equal(t, originalField.trackingRef,
decodedField.trackingRef, "Field trackingRef mismatch at index %d", i)
- assert.Equal(t, originalField.fieldType.TypeId(),
decodedField.fieldType.TypeId(), "Field type ID mismatch at index %d", i)
+ default:
+ t.Errorf("Unknown FieldType at path %s: %T", path, original)
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]