zeroshade commented on code in PR #1808:
URL: https://github.com/apache/arrow-adbc/pull/1808#discussion_r1591109804


##########
go/adbc/driver/snowflake/binding.go:
##########
@@ -0,0 +1,141 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package snowflake
+
+import (
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow/go/v17/arrow"
+       "github.com/apache/arrow/go/v17/arrow/array"
+)
+
+func convertArrowToNamedValue(batch arrow.Record, index int) ([]driver.NamedValue, error) {
+       // see goTypeToSnowflake in gosnowflake
+       // technically, snowflake can bind an array of values at once, but
+       // only for INSERT, so we can't take advantage of that without
+       // analyzing the query ourselves
+       params := make([]driver.NamedValue, batch.NumCols())
+       for i, field := range batch.Schema().Fields() {
+               rawColumn := batch.Column(i)
+               params[i].Ordinal = i + 1
+               switch column := rawColumn.(type) {
+               case *array.Boolean:
+                       params[i].Value = sql.NullBool{
+                               Bool:  column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Float32:
+                       // Snowflake only recognizes float64
+                       params[i].Value = sql.NullFloat64{
+                               Float64: float64(column.Value(index)),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Float64:
+                       params[i].Value = sql.NullFloat64{
+                               Float64: column.Value(index),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Int8:
+                       // Snowflake only recognizes int64
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int16:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int32:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int64:
+                       params[i].Value = sql.NullInt64{
+                               Int64: column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.String:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               case *array.LargeString:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               default:
+                       return nil, adbc.Error{
+                               Code: adbc.StatusNotImplemented,
+                               Msg:  fmt.Sprintf("[Snowflake] Unsupported bind param '%s' type %s", field.Name, field.Type.String()),

Review Comment:
   Do we want to also support lists (Snowflake's ARRAY type), or structs
   (Snowflake's OBJECT type), by JSON-marshalling them to a string? We could
   also just punt on that until someone asks for it (probably the better
   choice right now).
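   If we ever do go the JSON route, a rough sketch of what the helper could
   look like (hypothetical, not part of this PR; it assumes we lean on
   `GetOneForMarshal`, the per-element hook behind the arrays' own JSON
   marshalling, and it would need `encoding/json` in the imports):

   ```go
   // bindAsJSONString serializes one element of a nested column (list,
   // struct, map) to a JSON string so it could be bound as text and parsed
   // server-side (e.g. with PARSE_JSON).
   func bindAsJSONString(column arrow.Array, index int) (sql.NullString, error) {
       if column.IsNull(index) {
           return sql.NullString{}, nil
       }
       data, err := json.Marshal(column.GetOneForMarshal(index))
       if err != nil {
           return sql.NullString{}, err
       }
       return sql.NullString{String: string(data), Valid: true}, nil
   }
   ```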



##########
go/adbc/driver/snowflake/binding.go:
##########
@@ -0,0 +1,141 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package snowflake
+
+import (
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow/go/v17/arrow"
+       "github.com/apache/arrow/go/v17/arrow/array"
+)
+
+func convertArrowToNamedValue(batch arrow.Record, index int) ([]driver.NamedValue, error) {
+       // see goTypeToSnowflake in gosnowflake
+       // technically, snowflake can bind an array of values at once, but
+       // only for INSERT, so we can't take advantage of that without
+       // analyzing the query ourselves
+       params := make([]driver.NamedValue, batch.NumCols())
+       for i, field := range batch.Schema().Fields() {
+               rawColumn := batch.Column(i)
+               params[i].Ordinal = i + 1
+               switch column := rawColumn.(type) {
+               case *array.Boolean:
+                       params[i].Value = sql.NullBool{
+                               Bool:  column.Value(index),
+                               Valid: column.IsValid(index),
+                       }

Review Comment:
   Something for us to think about: go1.22 added `sql.Null[T]`, which could
   let us write a more generic implementation of this later on. Since we
   still officially support go1.21, we can't use it yet.
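   For reference, a rough (untested) sketch of what that might look like once
   we can require go1.22; `typedArray` here is a hypothetical constraint
   matching the generated array types, which all expose `Value(i)` alongside
   the `arrow.Array` interface:

   ```go
   // typedArray matches the generated typed arrays (*array.Int64,
   // *array.Float64, *array.String, ...).
   type typedArray[T any] interface {
       arrow.Array
       Value(i int) T
   }

   // nullValue builds the bind value for one row of a typed column.
   func nullValue[T any](col typedArray[T], index int) sql.Null[T] {
       return sql.Null[T]{V: col.Value(index), Valid: col.IsValid(index)}
   }
   ```

   Each case above would then collapse to something like
   `params[i].Value = nullValue(column, index)`, though the narrower numeric
   types (`*array.Int8`, `*array.Float32`, ...) would still need an explicit
   widening step since Snowflake only understands int64/float64.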



##########
go/adbc/driver/snowflake/binding.go:
##########
@@ -0,0 +1,141 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package snowflake
+
+import (
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow/go/v17/arrow"
+       "github.com/apache/arrow/go/v17/arrow/array"
+)
+
+func convertArrowToNamedValue(batch arrow.Record, index int) ([]driver.NamedValue, error) {
+       // see goTypeToSnowflake in gosnowflake
+       // technically, snowflake can bind an array of values at once, but
+       // only for INSERT, so we can't take advantage of that without
+       // analyzing the query ourselves
+       params := make([]driver.NamedValue, batch.NumCols())
+       for i, field := range batch.Schema().Fields() {
+               rawColumn := batch.Column(i)
+               params[i].Ordinal = i + 1
+               switch column := rawColumn.(type) {
+               case *array.Boolean:
+                       params[i].Value = sql.NullBool{
+                               Bool:  column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Float32:
+                       // Snowflake only recognizes float64
+                       params[i].Value = sql.NullFloat64{
+                               Float64: float64(column.Value(index)),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Float64:
+                       params[i].Value = sql.NullFloat64{
+                               Float64: column.Value(index),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Int8:
+                       // Snowflake only recognizes int64
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int16:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int32:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int64:
+                       params[i].Value = sql.NullInt64{
+                               Int64: column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.String:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               case *array.LargeString:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               default:
+                       return nil, adbc.Error{
+                               Code: adbc.StatusNotImplemented,
+                               Msg:  fmt.Sprintf("[Snowflake] Unsupported bind param '%s' type %s", field.Name, field.Type.String()),
+                       }
+               }
+       }
+       return params, nil
+}
+
+type snowflakeBindReader struct {
+       doQuery      func([]driver.NamedValue) (array.RecordReader, error)
+       currentBatch arrow.Record
+       nextIndex    int64
+       // may be nil if we bound only a batch
+       stream array.RecordReader
+}
+
+func (r *snowflakeBindReader) Release() {
+       if r.currentBatch != nil {
+               r.currentBatch.Release()
+       }
+       if r.stream != nil {
+               r.stream.Release()
+       }
+}
+
+func (r *snowflakeBindReader) Next() (array.RecordReader, error) {
+       params, err := r.NextParams()
+       if err != nil {
+               return nil, err
+       } else if params == nil {

Review Comment:
   No need for the `else`, since we already return in the previous branch;
   e.g.:
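   (Sketch only; it assumes the rest of the method just hands the params to
   `r.doQuery`.)

   ```go
   params, err := r.NextParams()
   if err != nil {
       return nil, err
   }
   if params == nil {
       // end-of-stream
       return nil, nil
   }
   return r.doQuery(params)
   ```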



##########
go/adbc/driver/snowflake/binding.go:
##########
@@ -0,0 +1,141 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package snowflake
+
+import (
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow/go/v17/arrow"
+       "github.com/apache/arrow/go/v17/arrow/array"
+)
+
+func convertArrowToNamedValue(batch arrow.Record, index int) ([]driver.NamedValue, error) {
+       // see goTypeToSnowflake in gosnowflake
+       // technically, snowflake can bind an array of values at once, but
+       // only for INSERT, so we can't take advantage of that without
+       // analyzing the query ourselves
+       params := make([]driver.NamedValue, batch.NumCols())
+       for i, field := range batch.Schema().Fields() {
+               rawColumn := batch.Column(i)
+               params[i].Ordinal = i + 1
+               switch column := rawColumn.(type) {
+               case *array.Boolean:
+                       params[i].Value = sql.NullBool{
+                               Bool:  column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Float32:
+                       // Snowflake only recognizes float64
+                       params[i].Value = sql.NullFloat64{
+                               Float64: float64(column.Value(index)),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Float64:
+                       params[i].Value = sql.NullFloat64{
+                               Float64: column.Value(index),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Int8:
+                       // Snowflake only recognizes int64
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int16:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int32:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int64:
+                       params[i].Value = sql.NullInt64{
+                               Int64: column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.String:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               case *array.LargeString:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               default:
+                       return nil, adbc.Error{
+                               Code: adbc.StatusNotImplemented,
+                               Msg:  fmt.Sprintf("[Snowflake] Unsupported bind param '%s' type %s", field.Name, field.Type.String()),
+                       }
+               }
+       }
+       return params, nil
+}
+
+type snowflakeBindReader struct {
+       doQuery      func([]driver.NamedValue) (array.RecordReader, error)
+       currentBatch arrow.Record
+       nextIndex    int64
+       // may be nil if we bound only a batch
+       stream array.RecordReader
+}
+
+func (r *snowflakeBindReader) Release() {
+       if r.currentBatch != nil {
+               r.currentBatch.Release()
+       }
+       if r.stream != nil {
+               r.stream.Release()
+       }
+}
+
+func (r *snowflakeBindReader) Next() (array.RecordReader, error) {
+       params, err := r.NextParams()
+       if err != nil {
+               return nil, err
+       } else if params == nil {
+               // end-of-stream
+               return nil, nil

Review Comment:
   Is a bare `nil, nil` meant to define end-of-stream here, rather than
   returning `io.EOF`?
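   If we'd rather follow the `io.EOF` convention, the branch would become
   something like this (sketch; it needs `io` in the imports, and callers
   would then check `errors.Is(err, io.EOF)`):

   ```go
   if params == nil {
       // end-of-stream: surface it as io.EOF so an exhausted reader can't
       // be mistaken for a valid nil result
       return nil, io.EOF
   }
   ```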



##########
go/adbc/driver/snowflake/binding.go:
##########
@@ -0,0 +1,141 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package snowflake
+
+import (
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow/go/v17/arrow"
+       "github.com/apache/arrow/go/v17/arrow/array"
+)
+
+func convertArrowToNamedValue(batch arrow.Record, index int) ([]driver.NamedValue, error) {
+       // see goTypeToSnowflake in gosnowflake
+       // technically, snowflake can bind an array of values at once, but
+       // only for INSERT, so we can't take advantage of that without
+       // analyzing the query ourselves

Review Comment:
   I hate this, but I guess there's nothing we can do about it right now.



##########
go/adbc/driver/snowflake/binding.go:
##########
@@ -0,0 +1,141 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package snowflake
+
+import (
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow/go/v17/arrow"
+       "github.com/apache/arrow/go/v17/arrow/array"
+)
+
+func convertArrowToNamedValue(batch arrow.Record, index int) ([]driver.NamedValue, error) {
+       // see goTypeToSnowflake in gosnowflake
+       // technically, snowflake can bind an array of values at once, but
+       // only for INSERT, so we can't take advantage of that without
+       // analyzing the query ourselves
+       params := make([]driver.NamedValue, batch.NumCols())
+       for i, field := range batch.Schema().Fields() {
+               rawColumn := batch.Column(i)
+               params[i].Ordinal = i + 1
+               switch column := rawColumn.(type) {
+               case *array.Boolean:
+                       params[i].Value = sql.NullBool{
+                               Bool:  column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Float32:
+                       // Snowflake only recognizes float64
+                       params[i].Value = sql.NullFloat64{
+                               Float64: float64(column.Value(index)),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Float64:
+                       params[i].Value = sql.NullFloat64{
+                               Float64: column.Value(index),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Int8:
+                       // Snowflake only recognizes int64
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int16:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int32:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int64:
+                       params[i].Value = sql.NullInt64{
+                               Int64: column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.String:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               case *array.LargeString:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               default:
+                       return nil, adbc.Error{
+                               Code: adbc.StatusNotImplemented,
+                               Msg:  fmt.Sprintf("[Snowflake] Unsupported bind param '%s' type %s", field.Name, field.Type.String()),
+                       }
+               }
+       }
+       return params, nil
+}
+
+type snowflakeBindReader struct {
+       doQuery      func([]driver.NamedValue) (array.RecordReader, error)
+       currentBatch arrow.Record
+       nextIndex    int64
+       // may be nil if we bound only a batch
+       stream array.RecordReader
+}
+
+func (r *snowflakeBindReader) Release() {
+       if r.currentBatch != nil {
+               r.currentBatch.Release()
+       }

Review Comment:
   We might still end up with a double release here, because we don't set
   `r.currentBatch` to `nil` after calling `stream.Next()` and hitting the
   end of the stream. We should probably always set `r.currentBatch = nil`
   before calling `stream.Next()` so we can never double-release. You can
   test for excess releases by running with `-tags assert`, which turns on
   the debug asserts in Arrow.
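   Roughly (a sketch of the advancement logic; `NextParams` isn't in this
   hunk, so the surrounding details are my assumptions):

   ```go
   // Drop our reference before advancing so an end-of-stream Next() can
   // never leave a dangling, already-released record behind.
   if r.currentBatch != nil {
       r.currentBatch.Release()
       r.currentBatch = nil
   }
   if r.stream == nil || !r.stream.Next() {
       // no more batches to bind
       return nil, nil
   }
   r.currentBatch = r.stream.Record()
   r.currentBatch.Retain() // Record() is only valid until the next Next()
   r.nextIndex = 0
   ```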



##########
go/adbc/driver/snowflake/binding.go:
##########
@@ -0,0 +1,141 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package snowflake
+
+import (
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow/go/v17/arrow"
+       "github.com/apache/arrow/go/v17/arrow/array"
+)
+
+func convertArrowToNamedValue(batch arrow.Record, index int) ([]driver.NamedValue, error) {
+       // see goTypeToSnowflake in gosnowflake
+       // technically, snowflake can bind an array of values at once, but
+       // only for INSERT, so we can't take advantage of that without
+       // analyzing the query ourselves
+       params := make([]driver.NamedValue, batch.NumCols())
+       for i, field := range batch.Schema().Fields() {
+               rawColumn := batch.Column(i)
+               params[i].Ordinal = i + 1
+               switch column := rawColumn.(type) {
+               case *array.Boolean:
+                       params[i].Value = sql.NullBool{
+                               Bool:  column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Float32:
+                       // Snowflake only recognizes float64
+                       params[i].Value = sql.NullFloat64{
+                               Float64: float64(column.Value(index)),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Float64:
+                       params[i].Value = sql.NullFloat64{
+                               Float64: column.Value(index),
+                               Valid:   column.IsValid(index),
+                       }
+               case *array.Int8:
+                       // Snowflake only recognizes int64
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int16:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int32:
+                       params[i].Value = sql.NullInt64{
+                               Int64: int64(column.Value(index)),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.Int64:
+                       params[i].Value = sql.NullInt64{
+                               Int64: column.Value(index),
+                               Valid: column.IsValid(index),
+                       }
+               case *array.String:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               case *array.LargeString:
+                       params[i].Value = sql.NullString{
+                               String: column.Value(index),
+                               Valid:  column.IsValid(index),
+                       }
+               default:
+                       return nil, adbc.Error{
+                               Code: adbc.StatusNotImplemented,
+                               Msg:  fmt.Sprintf("[Snowflake] Unsupported bind param '%s' type %s", field.Name, field.Type.String()),
+                       }
+               }
+       }
+       return params, nil
+}
+
+type snowflakeBindReader struct {
+       doQuery      func([]driver.NamedValue) (array.RecordReader, error)
+       currentBatch arrow.Record
+       nextIndex    int64
+       // may be nil if we bound only a batch
+       stream array.RecordReader
+}
+
+func (r *snowflakeBindReader) Release() {
+       if r.currentBatch != nil {
+               r.currentBatch.Release()
+       }
+       if r.stream != nil {
+               r.stream.Release()
+       }

Review Comment:
   We should probably also set both `r.currentBatch` and `r.stream` to `nil`
   in `Release`, just to prevent issues if it is somehow called more than
   once.
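   e.g. (sketch):

   ```go
   func (r *snowflakeBindReader) Release() {
       if r.currentBatch != nil {
           r.currentBatch.Release()
           r.currentBatch = nil // guard against a second Release call
       }
       if r.stream != nil {
           r.stream.Release()
           r.stream = nil
       }
   }
   ```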


