This is an automated email from the ASF dual-hosted git repository.

sbinet pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/master by this push:
     new e4ae2f6  ARROW-3951: [Go] implement a CSV writer
e4ae2f6 is described below

commit e4ae2f652c9a84eebc9c60ff3d90e77d814ea3de
Author: Anson Qian <[email protected]>
AuthorDate: Mon Mar 11 14:33:55 2019 +0100

    ARROW-3951: [Go] implement a CSV writer
    
    @sbinet
    
    Author: Anson Qian <[email protected]>
    
    Closes #3755 from anson627/arrow-3951 and squashes the following commits:
    
    df1735a3 <Anson Qian> Fix reader test
    9bc8dc0e <Anson Qian> Fix unit test
    6e63617a <Anson Qian> Fix typo
    7624a972 <Anson Qian> Add example and bump up test coverage
    f460e192 <Anson Qian> Add newline at end of file
947235c6 <Anson Qian> Consolidate option for reader and writer
    2a57a679 <Anson Qian> Add memory size check
    e00638e8 <Anson Qian> Address code reviews
    92cbcea5 <Anson Qian> ARROW-3951  implement a CSV writer
---
 go/arrow/csv/common.go                       | 119 ++++++++++++++++++++
 go/arrow/csv/{csv.go => reader.go}           |  58 ----------
 go/arrow/csv/{csv_test.go => reader_test.go} |  76 ++++++-------
 go/arrow/csv/writer.go                       | 129 +++++++++++++++++++++
 go/arrow/csv/writer_test.go                  | 162 +++++++++++++++++++++++++++
 5 files changed, 448 insertions(+), 96 deletions(-)
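
For reference, a minimal usage sketch of the writer API added by this patch (NewWriter, Write, and the WithComma/WithCRLF options from the diff below; the two-column schema and the bytes.Buffer sink are illustrative choices, not part of the commit):

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/csv"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	schema := arrow.NewSchema(
		[]arrow.Field{
			{Name: "i64", Type: arrow.PrimitiveTypes.Int64},
			{Name: "str", Type: arrow.BinaryTypes.String},
		},
		nil,
	)

	// Build a 2-column, 3-row record to write out.
	b := array.NewRecordBuilder(memory.NewGoAllocator(), schema)
	defer b.Release()
	b.Field(0).(*array.Int64Builder).AppendValues([]int64{1, 2, 3}, nil)
	b.Field(1).(*array.StringBuilder).AppendValues([]string{"a", "b", "c"}, nil)
	rec := b.NewRecord()
	defer rec.Release()

	// Write the record as semicolon-separated CSV, one line per row.
	var buf bytes.Buffer
	w := csv.NewWriter(&buf, schema, csv.WithComma(';'), csv.WithCRLF(false))
	if err := w.Write(rec); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String())
	// 1;a
	// 2;b
	// 3;c
}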

diff --git a/go/arrow/csv/common.go b/go/arrow/csv/common.go
new file mode 100644
index 0000000..baa3edd
--- /dev/null
+++ b/go/arrow/csv/common.go
@@ -0,0 +1,119 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package csv reads CSV files and presents the extracted data as records. It
+// also writes records out as CSV files.
+package csv
+
+import (
+       "errors"
+       "fmt"
+
+       "github.com/apache/arrow/go/arrow"
+       "github.com/apache/arrow/go/arrow/memory"
+)
+
+var (
+       ErrMismatchFields = errors.New("arrow/csv: number of records mismatch")
+)
+
+// Option configures a CSV reader/writer.
+type Option func(config)
+type config interface{}
+
+// WithComma specifies the fields separation character used while parsing CSV files.
+func WithComma(c rune) Option {
+       return func(cfg config) {
+               switch cfg := cfg.(type) {
+               case *Reader:
+                       cfg.r.Comma = c
+               case *Writer:
+                       cfg.w.Comma = c
+               default:
+                       panic(fmt.Errorf("arrow/csv: unknown config type %T", cfg))
+               }
+       }
+}
+
+// WithComment specifies the comment character used while parsing CSV files.
+func WithComment(c rune) Option {
+       return func(cfg config) {
+               switch cfg := cfg.(type) {
+               case *Reader:
+                       cfg.r.Comment = c
+               default:
+                       panic(fmt.Errorf("arrow/csv: unknown config type %T", cfg))
+               }
+       }
+}
+
+// WithAllocator specifies the Arrow memory allocator used while building records.
+func WithAllocator(mem memory.Allocator) Option {
+       return func(cfg config) {
+               switch cfg := cfg.(type) {
+               case *Reader:
+                       cfg.mem = mem
+               default:
+                       panic(fmt.Errorf("arrow/csv: unknown config type %T", cfg))
+               }
+       }
+}
+
+// WithChunk specifies the chunk size used while parsing CSV files.
+//
+// If n is zero or 1, no chunking will take place and the reader will create
+// one record per row.
+// If n is greater than 1, chunks of n rows will be read.
+// If n is negative, the reader will load the whole CSV file into memory and
+// create one big record with all the rows.
+func WithChunk(n int) Option {
+       return func(cfg config) {
+               switch cfg := cfg.(type) {
+               case *Reader:
+                       cfg.chunk = n
+               default:
+                       panic(fmt.Errorf("arrow/csv: unknown config type %T", cfg))
+               }
+       }
+}
+
+// WithCRLF specifies the line terminator used while writing CSV files.
+// If useCRLF is true, \r\n is used as the line terminator, otherwise \n is used.
+// The default value is false.
+func WithCRLF(useCRLF bool) Option {
+       return func(cfg config) {
+               switch cfg := cfg.(type) {
+               case *Writer:
+                       cfg.w.UseCRLF = useCRLF
+               default:
+                       panic(fmt.Errorf("arrow/csv: unknown config type %T", cfg))
+               }
+       }
+}
+
+func validate(schema *arrow.Schema) {
+       for i, f := range schema.Fields() {
+               switch ft := f.Type.(type) {
+               case *arrow.BooleanType:
+               case *arrow.Int8Type, *arrow.Int16Type, *arrow.Int32Type, *arrow.Int64Type:
+               case *arrow.Uint8Type, *arrow.Uint16Type, *arrow.Uint32Type, *arrow.Uint64Type:
+               case *arrow.Float32Type, *arrow.Float64Type:
+               case *arrow.StringType:
+               default:
+                       panic(fmt.Errorf("arrow/csv: field %d (%s) has invalid data type %T", i, f.Name, ft))
+               }
+       }
+}
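
The WithChunk option documented above controls how many CSV rows end up in each record the reader produces. A minimal sketch of that behavior (the sample data, the semicolon separator, and the two-column schema are arbitrary illustrative choices):

package main

import (
	"fmt"
	"strings"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/csv"
)

func main() {
	schema := arrow.NewSchema(
		[]arrow.Field{
			{Name: "i64", Type: arrow.PrimitiveTypes.Int64},
			{Name: "str", Type: arrow.BinaryTypes.String},
		},
		nil,
	)

	data := "0;str-0\n1;str-1\n2;str-2\n3;str-3\n"

	// WithChunk(3) on 4 input rows yields a 3-row record, then a 1-row record;
	// WithChunk(-1) would instead yield a single 4-row record.
	r := csv.NewReader(strings.NewReader(data), schema,
		csv.WithComma(';'), csv.WithChunk(3))
	defer r.Release()

	for r.Next() {
		fmt.Println(r.Record().NumRows()) // 3, then 1
	}
}
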
diff --git a/go/arrow/csv/csv.go b/go/arrow/csv/reader.go
similarity index 82%
rename from go/arrow/csv/csv.go
rename to go/arrow/csv/reader.go
index 022c46d..c54beb7 100644
--- a/go/arrow/csv/csv.go
+++ b/go/arrow/csv/reader.go
@@ -14,13 +14,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-// Package csv reads CSV files and presents the extracted data as records.
 package csv
 
 import (
        "encoding/csv"
-       "errors"
-       "fmt"
        "io"
        "strconv"
        "sync/atomic"
@@ -31,47 +28,6 @@ import (
        "github.com/apache/arrow/go/arrow/memory"
 )
 
-var (
-       ErrMismatchFields = errors.New("arrow/csv: number of records mismatch")
-)
-
-// Option configures a CSV reader.
-type Option func(*Reader)
-
-// WithComment specifies the comment character used while parsing CSV files.
-func WithComment(c rune) Option {
-       return func(r *Reader) {
-               r.r.Comment = c
-       }
-}
-
-// WithComma specifies the fields separation character used while parsing CSV files.
-func WithComma(c rune) Option {
-       return func(r *Reader) {
-               r.r.Comma = c
-       }
-}
-
-// WithAllocator specifies the Arrow memory allocator used while building records.
-func WithAllocator(mem memory.Allocator) Option {
-       return func(r *Reader) {
-               r.mem = mem
-       }
-}
-
-// WithChunk specifies the chunk size used while parsing CSV files.
-//
-// If n is zero or 1, no chunking will take place and the reader will create
-// one record per row.
-// If n is greater than 1, chunks of n rows will be read.
-// If n is negative, the reader will load the whole CSV file into memory and
-// create one big record with all the rows.
-func WithChunk(n int) Option {
-       return func(r *Reader) {
-               r.chunk = n
-       }
-}
-
 // Reader wraps encoding/csv.Reader and creates array.Records from a schema.
 type Reader struct {
        r      *csv.Reader
@@ -392,20 +348,6 @@ func (r *Reader) Release() {
        }
 }
 
-func validate(schema *arrow.Schema) {
-       for i, f := range schema.Fields() {
-               switch ft := f.Type.(type) {
-               case *arrow.BooleanType:
-               case *arrow.Int8Type, *arrow.Int16Type, *arrow.Int32Type, *arrow.Int64Type:
-               case *arrow.Uint8Type, *arrow.Uint16Type, *arrow.Uint32Type, *arrow.Uint64Type:
-               case *arrow.Float32Type, *arrow.Float64Type:
-               case *arrow.StringType:
-               default:
-                       panic(fmt.Errorf("arrow/csv: field %d (%s) has invalid data type %T", i, f.Name, ft))
-               }
-       }
-}
-
 var (
        _ array.RecordReader = (*Reader)(nil)
 )
diff --git a/go/arrow/csv/csv_test.go b/go/arrow/csv/reader_test.go
similarity index 92%
rename from go/arrow/csv/csv_test.go
rename to go/arrow/csv/reader_test.go
index 97f31cc..0c9507e 100644
--- a/go/arrow/csv/csv_test.go
+++ b/go/arrow/csv/reader_test.go
@@ -56,42 +56,42 @@ func Example() {
        for r.Next() {
                rec := r.Record()
                for i, col := range rec.Columns() {
-                       fmt.Printf("rec[%d][%q]: %v\n", i, rec.ColumnName(i), col)
+                       fmt.Printf("rec[%d][%q]: %v\n", n, rec.ColumnName(i), col)
                }
                n++
        }
 
        // Output:
        // rec[0]["i64"]: [0]
-       // rec[1]["f64"]: [0]
-       // rec[2]["str"]: ["str-0"]
-       // rec[0]["i64"]: [1]
+       // rec[0]["f64"]: [0]
+       // rec[0]["str"]: ["str-0"]
+       // rec[1]["i64"]: [1]
        // rec[1]["f64"]: [1]
-       // rec[2]["str"]: ["str-1"]
-       // rec[0]["i64"]: [2]
-       // rec[1]["f64"]: [2]
+       // rec[1]["str"]: ["str-1"]
+       // rec[2]["i64"]: [2]
+       // rec[2]["f64"]: [2]
        // rec[2]["str"]: ["str-2"]
-       // rec[0]["i64"]: [3]
-       // rec[1]["f64"]: [3]
-       // rec[2]["str"]: ["str-3"]
-       // rec[0]["i64"]: [4]
-       // rec[1]["f64"]: [4]
-       // rec[2]["str"]: ["str-4"]
-       // rec[0]["i64"]: [5]
-       // rec[1]["f64"]: [5]
-       // rec[2]["str"]: ["str-5"]
-       // rec[0]["i64"]: [6]
-       // rec[1]["f64"]: [6]
-       // rec[2]["str"]: ["str-6"]
-       // rec[0]["i64"]: [7]
-       // rec[1]["f64"]: [7]
-       // rec[2]["str"]: ["str-7"]
-       // rec[0]["i64"]: [8]
-       // rec[1]["f64"]: [8]
-       // rec[2]["str"]: ["str-8"]
-       // rec[0]["i64"]: [9]
-       // rec[1]["f64"]: [9]
-       // rec[2]["str"]: ["str-9"]
+       // rec[3]["i64"]: [3]
+       // rec[3]["f64"]: [3]
+       // rec[3]["str"]: ["str-3"]
+       // rec[4]["i64"]: [4]
+       // rec[4]["f64"]: [4]
+       // rec[4]["str"]: ["str-4"]
+       // rec[5]["i64"]: [5]
+       // rec[5]["f64"]: [5]
+       // rec[5]["str"]: ["str-5"]
+       // rec[6]["i64"]: [6]
+       // rec[6]["f64"]: [6]
+       // rec[6]["str"]: ["str-6"]
+       // rec[7]["i64"]: [7]
+       // rec[7]["f64"]: [7]
+       // rec[7]["str"]: ["str-7"]
+       // rec[8]["i64"]: [8]
+       // rec[8]["f64"]: [8]
+       // rec[8]["str"]: ["str-8"]
+       // rec[9]["i64"]: [9]
+       // rec[9]["f64"]: [9]
+       // rec[9]["str"]: ["str-9"]
 }
 
 func Example_withChunk() {
@@ -127,24 +127,24 @@ func Example_withChunk() {
        for r.Next() {
                rec := r.Record()
                for i, col := range rec.Columns() {
-                       fmt.Printf("rec[%d][%q]: %v\n", i, rec.ColumnName(i), col)
+                       fmt.Printf("rec[%d][%q]: %v\n", n, rec.ColumnName(i), col)
                }
                n++
        }
 
        // Output:
        // rec[0]["i64"]: [0 1 2]
-       // rec[1]["f64"]: [0 1 2]
-       // rec[2]["str"]: ["str-0" "str-1" "str-2"]
-       // rec[0]["i64"]: [3 4 5]
+       // rec[0]["f64"]: [0 1 2]
+       // rec[0]["str"]: ["str-0" "str-1" "str-2"]
+       // rec[1]["i64"]: [3 4 5]
        // rec[1]["f64"]: [3 4 5]
-       // rec[2]["str"]: ["str-3" "str-4" "str-5"]
-       // rec[0]["i64"]: [6 7 8]
-       // rec[1]["f64"]: [6 7 8]
+       // rec[1]["str"]: ["str-3" "str-4" "str-5"]
+       // rec[2]["i64"]: [6 7 8]
+       // rec[2]["f64"]: [6 7 8]
        // rec[2]["str"]: ["str-6" "str-7" "str-8"]
-       // rec[0]["i64"]: [9]
-       // rec[1]["f64"]: [9]
-       // rec[2]["str"]: ["str-9"]
+       // rec[3]["i64"]: [9]
+       // rec[3]["f64"]: [9]
+       // rec[3]["str"]: ["str-9"]
 }
 
 func TestCSVReader(t *testing.T) {
diff --git a/go/arrow/csv/writer.go b/go/arrow/csv/writer.go
new file mode 100644
index 0000000..b8e0854
--- /dev/null
+++ b/go/arrow/csv/writer.go
@@ -0,0 +1,129 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package csv
+
+import (
+       "encoding/csv"
+       "fmt"
+       "io"
+
+       "github.com/apache/arrow/go/arrow"
+       "github.com/apache/arrow/go/arrow/array"
+)
+
+// Writer wraps encoding/csv.Writer and writes array.Record based on a schema.
+type Writer struct {
+       w      *csv.Writer
+       schema *arrow.Schema
+}
+
+// NewWriter returns a writer that writes array.Records to the CSV file
+// with the given schema.
+//
+// NewWriter panics if the given schema contains fields that have types that are not
+// primitive types.
+func NewWriter(w io.Writer, schema *arrow.Schema, opts ...Option) *Writer {
+       validate(schema)
+
+       ww := &Writer{w: csv.NewWriter(w), schema: schema}
+       for _, opt := range opts {
+               opt(ww)
+       }
+
+       return ww
+}
+
+func (w *Writer) Schema() *arrow.Schema { return w.schema }
+
+// Write writes a single Record to the CSV file, one CSV line per row.
+func (w *Writer) Write(record array.Record) error {
+       if !record.Schema().Equal(w.schema) {
+               return ErrMismatchFields
+       }
+
+       recs := make([][]string, record.NumRows())
+       for i := range recs {
+               recs[i] = make([]string, record.NumCols())
+       }
+
+       for j, col := range record.Columns() {
+               switch w.schema.Field(j).Type.(type) {
+               case *arrow.BooleanType:
+                       arr := col.(*array.Boolean)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Int8Type:
+                       arr := col.(*array.Int8)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Int16Type:
+                       arr := col.(*array.Int16)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Int32Type:
+                       arr := col.(*array.Int32)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Int64Type:
+                       arr := col.(*array.Int64)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Uint8Type:
+                       arr := col.(*array.Uint8)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Uint16Type:
+                       arr := col.(*array.Uint16)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Uint32Type:
+                       arr := col.(*array.Uint32)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Uint64Type:
+                       arr := col.(*array.Uint64)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Float32Type:
+                       arr := col.(*array.Float32)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.Float64Type:
+                       arr := col.(*array.Float64)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               case *arrow.StringType:
+                       arr := col.(*array.String)
+                       for i := 0; i < arr.Len(); i++ {
+                               recs[i][j] = fmt.Sprintf("%v", arr.Value(i))
+                       }
+               }
+       }
+
+       return w.w.WriteAll(recs)
+}
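
Note that Write above formats every cell with fmt.Sprintf("%v", ...) and hands the assembled rows to encoding/csv's WriteAll, so quoting and escaping follow encoding/csv's rules. A small sketch of the consequence (the one-column schema and the sample value are illustrative; a string containing the separator comes out quoted):

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/apache/arrow/go/arrow"
	"github.com/apache/arrow/go/arrow/array"
	"github.com/apache/arrow/go/arrow/csv"
	"github.com/apache/arrow/go/arrow/memory"
)

func main() {
	schema := arrow.NewSchema(
		[]arrow.Field{{Name: "str", Type: arrow.BinaryTypes.String}},
		nil,
	)

	b := array.NewRecordBuilder(memory.NewGoAllocator(), schema)
	defer b.Release()
	// A value that contains the separator itself.
	b.Field(0).(*array.StringBuilder).AppendValues([]string{"a;b"}, nil)
	rec := b.NewRecord()
	defer rec.Release()

	var buf bytes.Buffer
	w := csv.NewWriter(&buf, schema, csv.WithComma(';'))
	if err := w.Write(rec); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // "a;b"  (quoted by encoding/csv)
}
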
diff --git a/go/arrow/csv/writer_test.go b/go/arrow/csv/writer_test.go
new file mode 100644
index 0000000..d554a77
--- /dev/null
+++ b/go/arrow/csv/writer_test.go
@@ -0,0 +1,162 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package csv_test
+
+import (
+       "bytes"
+       "fmt"
+       "log"
+       "strings"
+       "testing"
+
+       "github.com/apache/arrow/go/arrow"
+       "github.com/apache/arrow/go/arrow/array"
+       "github.com/apache/arrow/go/arrow/csv"
+       "github.com/apache/arrow/go/arrow/memory"
+)
+
+func Example_writer() {
+       f := new(bytes.Buffer)
+
+       pool := memory.NewGoAllocator()
+       schema := arrow.NewSchema(
+               []arrow.Field{
+                       {Name: "i64", Type: arrow.PrimitiveTypes.Int64},
+                       {Name: "f64", Type: arrow.PrimitiveTypes.Float64},
+                       {Name: "str", Type: arrow.BinaryTypes.String},
+               },
+               nil,
+       )
+
+       b := array.NewRecordBuilder(pool, schema)
+       defer b.Release()
+
+       b.Field(0).(*array.Int64Builder).AppendValues([]int64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, nil)
+       b.Field(1).(*array.Float64Builder).AppendValues([]float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, nil)
+       b.Field(2).(*array.StringBuilder).AppendValues([]string{"str-0", "str-1", "str-2", "str-3", "str-4", "str-5", "str-6", "str-7", "str-8", "str-9"}, nil)
+
+       rec := b.NewRecord()
+       defer rec.Release()
+
+       w := csv.NewWriter(f, schema, csv.WithComma(';'))
+       err := w.Write(rec)
+       if err != nil {
+               log.Fatal(err)
+       }
+
+       r := csv.NewReader(f, schema, csv.WithComment('#'), csv.WithComma(';'))
+       defer r.Release()
+
+       n := 0
+       for r.Next() {
+               rec := r.Record()
+               for i, col := range rec.Columns() {
+                       fmt.Printf("rec[%d][%q]: %v\n", n, rec.ColumnName(i), col)
+               }
+               n++
+       }
+
+       // Output:
+       // rec[0]["i64"]: [0]
+       // rec[0]["f64"]: [0]
+       // rec[0]["str"]: ["str-0"]
+       // rec[1]["i64"]: [1]
+       // rec[1]["f64"]: [1]
+       // rec[1]["str"]: ["str-1"]
+       // rec[2]["i64"]: [2]
+       // rec[2]["f64"]: [2]
+       // rec[2]["str"]: ["str-2"]
+       // rec[3]["i64"]: [3]
+       // rec[3]["f64"]: [3]
+       // rec[3]["str"]: ["str-3"]
+       // rec[4]["i64"]: [4]
+       // rec[4]["f64"]: [4]
+       // rec[4]["str"]: ["str-4"]
+       // rec[5]["i64"]: [5]
+       // rec[5]["f64"]: [5]
+       // rec[5]["str"]: ["str-5"]
+       // rec[6]["i64"]: [6]
+       // rec[6]["f64"]: [6]
+       // rec[6]["str"]: ["str-6"]
+       // rec[7]["i64"]: [7]
+       // rec[7]["f64"]: [7]
+       // rec[7]["str"]: ["str-7"]
+       // rec[8]["i64"]: [8]
+       // rec[8]["f64"]: [8]
+       // rec[8]["str"]: ["str-8"]
+       // rec[9]["i64"]: [9]
+       // rec[9]["f64"]: [9]
+       // rec[9]["str"]: ["str-9"]
+}
+
+func TestCSVWriter(t *testing.T) {
+       f := new(bytes.Buffer)
+
+       pool := memory.NewCheckedAllocator(memory.NewGoAllocator())
+       defer pool.AssertSize(t, 0)
+       schema := arrow.NewSchema(
+               []arrow.Field{
+                       {Name: "bool", Type: arrow.FixedWidthTypes.Boolean},
+                       {Name: "i8", Type: arrow.PrimitiveTypes.Int8},
+                       {Name: "i16", Type: arrow.PrimitiveTypes.Int16},
+                       {Name: "i32", Type: arrow.PrimitiveTypes.Int32},
+                       {Name: "i64", Type: arrow.PrimitiveTypes.Int64},
+                       {Name: "u8", Type: arrow.PrimitiveTypes.Uint8},
+                       {Name: "u16", Type: arrow.PrimitiveTypes.Uint16},
+                       {Name: "u32", Type: arrow.PrimitiveTypes.Uint32},
+                       {Name: "u64", Type: arrow.PrimitiveTypes.Uint64},
+                       {Name: "f32", Type: arrow.PrimitiveTypes.Float32},
+                       {Name: "f64", Type: arrow.PrimitiveTypes.Float64},
+                       {Name: "str", Type: arrow.BinaryTypes.String},
+               },
+               nil,
+       )
+
+       b := array.NewRecordBuilder(pool, schema)
+       defer b.Release()
+
+       b.Field(0).(*array.BooleanBuilder).AppendValues([]bool{true, false, true}, nil)
+       b.Field(1).(*array.Int8Builder).AppendValues([]int8{-1, 0, 1}, nil)
+       b.Field(2).(*array.Int16Builder).AppendValues([]int16{-1, 0, 1}, nil)
+       b.Field(3).(*array.Int32Builder).AppendValues([]int32{-1, 0, 1}, nil)
+       b.Field(4).(*array.Int64Builder).AppendValues([]int64{-1, 0, 1}, nil)
+       b.Field(5).(*array.Uint8Builder).AppendValues([]uint8{0, 1, 2}, nil)
+       b.Field(6).(*array.Uint16Builder).AppendValues([]uint16{0, 1, 2}, nil)
+       b.Field(7).(*array.Uint32Builder).AppendValues([]uint32{0, 1, 2}, nil)
+       b.Field(8).(*array.Uint64Builder).AppendValues([]uint64{0, 1, 2}, nil)
+       b.Field(9).(*array.Float32Builder).AppendValues([]float32{0.0, 0.1, 0.2}, nil)
+       b.Field(10).(*array.Float64Builder).AppendValues([]float64{0.0, 0.1, 0.2}, nil)
+       b.Field(11).(*array.StringBuilder).AppendValues([]string{"str-0", "str-1", "str-2"}, nil)
+
+       rec := b.NewRecord()
+       defer rec.Release()
+
+       w := csv.NewWriter(f, schema, csv.WithComma(';'), csv.WithCRLF(false))
+       err := w.Write(rec)
+       if err != nil {
+               t.Fatal(err)
+       }
+
+       want := `true;-1;-1;-1;-1;0;0;0;0;0;0;str-0
+false;0;0;0;0;1;1;1;1;0.1;0.1;str-1
+true;1;1;1;1;2;2;2;2;0.2;0.2;str-2
+`
+
+       if got, want := f.String(), want; strings.Compare(got, want) != 0 {
+               t.Fatalf("invalid output:\ngot=%s\nwant=%s\n", got, want)
+       }
+}
