lidavidm commented on code in PR #2998:
URL: https://github.com/apache/arrow-adbc/pull/2998#discussion_r2252920241


##########
go/adbc/driver/databricks/statement_test.go:
##########
@@ -0,0 +1,55 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks_test
+
+import (
+       "context"
+       "testing"
+
+       "github.com/apache/arrow-adbc/go/adbc/driver/databricks"
+       "github.com/stretchr/testify/assert"
+)
+
+func TestStatementBasic(t *testing.T) {
+       // This is a basic test to ensure the code compiles
+       // Real tests would require a connection to Databricks
+
+       _ = context.Background()

Review Comment:
   nit: why do we need this `_ = context.Background()` line? It doesn't seem to serve any purpose.



##########
go/adbc/driver/databricks/statement.go:
##########
@@ -0,0 +1,275 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks
+
+import (
+       "context"
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+       "reflect"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-go/v18/arrow"
+       "github.com/apache/arrow-go/v18/arrow/array"
+
+       dbsqlrows "github.com/databricks/databricks-sql-go/rows"
+)
+
+type statementImpl struct {
+       conn       *connectionImpl
+       query      string
+       parameters []interface{}
+       prepared   *sql.Stmt
+}
+
+func (s *statementImpl) Close() error {
+       if s.prepared != nil {
+               return s.prepared.Close()
+       }
+       return nil
+}
+
+func (s *statementImpl) SetOption(key, val string) error {
+       // No statement-specific options are supported yet
+       return adbc.Error{
+               Code: adbc.StatusNotImplemented,
+               Msg:  fmt.Sprintf("unsupported statement option: %s", key),
+       }
+}
+
+func (s *statementImpl) SetSqlQuery(query string) error {
+       s.query = query
+       // Reset prepared statement if query changes
+       if s.prepared != nil {
+               _ = s.prepared.Close() // Ignore error on cleanup
+               s.prepared = nil
+       }
+       return nil
+}
+
+func (s *statementImpl) Prepare(ctx context.Context) error {
+       if s.query == "" {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       stmt, err := s.conn.db.PrepareContext(ctx, s.query)
+       if err != nil {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  fmt.Sprintf("failed to prepare statement: %v", 
err),
+               }
+       }
+
+       s.prepared = stmt
+       return nil
+}
+
+func (s *statementImpl) ExecuteQuery(ctx context.Context) (array.RecordReader, 
int64, error) {
+       if s.query == "" {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       // Get raw connection to access Arrow batches directly
+       conn, err := s.conn.db.Conn(ctx)
+       if err != nil {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to get raw connection: %v", 
err),
+               }
+       }
+       defer func() { _ = conn.Close() }()

Review Comment:
   Ideally we should record (or at least log) the error from `conn.Close()` rather than discarding it?



##########
go/adbc/driver/databricks/statement.go:
##########
@@ -0,0 +1,275 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks
+
+import (
+       "context"
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+       "reflect"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-go/v18/arrow"
+       "github.com/apache/arrow-go/v18/arrow/array"
+
+       dbsqlrows "github.com/databricks/databricks-sql-go/rows"
+)
+
+type statementImpl struct {
+       conn       *connectionImpl
+       query      string
+       parameters []interface{}
+       prepared   *sql.Stmt
+}
+
+func (s *statementImpl) Close() error {
+       if s.prepared != nil {
+               return s.prepared.Close()
+       }
+       return nil
+}
+
+func (s *statementImpl) SetOption(key, val string) error {
+       // No statement-specific options are supported yet
+       return adbc.Error{
+               Code: adbc.StatusNotImplemented,
+               Msg:  fmt.Sprintf("unsupported statement option: %s", key),
+       }
+}
+
+func (s *statementImpl) SetSqlQuery(query string) error {
+       s.query = query
+       // Reset prepared statement if query changes
+       if s.prepared != nil {
+               _ = s.prepared.Close() // Ignore error on cleanup
+               s.prepared = nil
+       }
+       return nil
+}
+
+func (s *statementImpl) Prepare(ctx context.Context) error {
+       if s.query == "" {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       stmt, err := s.conn.db.PrepareContext(ctx, s.query)
+       if err != nil {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  fmt.Sprintf("failed to prepare statement: %v", 
err),
+               }
+       }
+
+       s.prepared = stmt
+       return nil
+}
+
+func (s *statementImpl) ExecuteQuery(ctx context.Context) (array.RecordReader, 
int64, error) {
+       if s.query == "" {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       // Get raw connection to access Arrow batches directly
+       conn, err := s.conn.db.Conn(ctx)
+       if err != nil {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to get raw connection: %v", 
err),
+               }
+       }
+       defer func() { _ = conn.Close() }()
+
+       // Execute query using raw driver interface to get Arrow batches
+       var driverRows driver.Rows
+       err = conn.Raw(func(driverConn interface{}) error {
+               // Use raw driver interface for direct Arrow access
+               if queryerCtx, ok := driverConn.(driver.QueryerContext); ok {
+                       // Convert parameters to driver.NamedValue slice
+                       var driverArgs []driver.NamedValue
+                       for i, param := range s.parameters {
+                               driverArgs = append(driverArgs, 
driver.NamedValue{
+                                       Ordinal: i + 1,
+                                       Value:   param,
+                               })
+                       }
+                       driverRows, err = queryerCtx.QueryContext(ctx, s.query, 
driverArgs)
+                       return err
+               }
+               return fmt.Errorf("driver does not support QueryerContext")
+       })
+
+       if err != nil {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to execute query: %v", err),
+               }
+       }
+       defer func() { _ = driverRows.Close() }()
+
+       // Convert to databricks rows interface to get Arrow batches
+       databricksRows, ok := driverRows.(dbsqlrows.Rows)
+       if !ok {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  "driver rows do not support Arrow batches",
+               }
+       }
+
+       // Use the IPC stream interface (zero-copy)
+       reader, err := newIPCReaderAdapter(ctx, databricksRows)
+       if err != nil {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to create IPC reader adapter: 
%v", err),
+               }
+       }
+
+       // Return -1 for rowsAffected (unknown) since we can't count without 
consuming
+       // The ADBC spec allows -1 to indicate "unknown number of rows affected"
+       return reader, -1, nil
+}
+
+func (s *statementImpl) ExecuteUpdate(ctx context.Context) (int64, error) {
+       var result sql.Result
+       var err error
+
+       if s.prepared != nil {
+               result, err = s.prepared.ExecContext(ctx, s.parameters...)
+       } else if s.query != "" {
+               result, err = s.conn.db.ExecContext(ctx, s.query, 
s.parameters...)
+       } else {
+               return -1, adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       if err != nil {
+               return -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to execute update: %v", err),
+               }
+       }
+
+       rowsAffected, err := result.RowsAffected()
+       if err != nil {
+               return -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to get rows affected: %v", 
err),
+               }
+       }
+
+       return rowsAffected, nil
+}
+
+func (s *statementImpl) Bind(ctx context.Context, values arrow.Record) error {
+       // Convert Arrow record to parameters
+       s.parameters = make([]interface{}, values.NumCols())
+
+       for i := 0; i < int(values.NumCols()); i++ {
+               col := values.Column(i)
+               if col.Len() == 0 {
+                       s.parameters[i] = nil
+                       continue
+               }
+
+               // Take the first value from each column

Review Comment:
   That would also be surprising.



##########
go/adbc/driver/databricks/driver_test.go:
##########
@@ -0,0 +1,116 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks_test
+
+import (
+       "context"
+       "testing"
+
+       "github.com/apache/arrow-adbc/go/adbc/driver/databricks"
+       "github.com/apache/arrow-go/v18/arrow/memory"
+       "github.com/stretchr/testify/assert"
+       "github.com/stretchr/testify/require"
+)
+
+func TestDriverCreation(t *testing.T) {
+       driver := databricks.NewDriver(memory.DefaultAllocator)
+       assert.NotNil(t, driver)
+}
+
+func TestDatabaseCreation(t *testing.T) {
+       driver := databricks.NewDriver(memory.DefaultAllocator)
+
+       // Test with minimal options
+       opts := map[string]string{
+               databricks.OptionServerHostname: "test-hostname",
+               databricks.OptionHTTPPath:       "/sql/1.0/warehouses/test",
+               databricks.OptionAccessToken:    "test-token",
+       }
+
+       db, err := driver.NewDatabase(opts)
+       require.NoError(t, err)
+       assert.NotNil(t, db)
+
+       // Clean up
+       err = db.Close()
+       assert.NoError(t, err)
+}
+
+func TestDatabaseOptionsValidation(t *testing.T) {
+       driver := databricks.NewDriver(memory.DefaultAllocator)
+
+       // Test missing required options
+       tests := []struct {
+               name    string
+               opts    map[string]string
+               wantErr bool
+       }{
+               {
+                       name: "missing hostname",
+                       opts: map[string]string{
+                               databricks.OptionHTTPPath:    
"/sql/1.0/warehouses/test",
+                               databricks.OptionAccessToken: "test-token",
+                       },
+                       wantErr: true,
+               },
+               {
+                       name: "missing http path",
+                       opts: map[string]string{
+                               databricks.OptionServerHostname: 
"test-hostname",
+                               databricks.OptionAccessToken:    "test-token",
+                       },
+                       wantErr: true,
+               },
+               {
+                       name: "missing access token",
+                       opts: map[string]string{
+                               databricks.OptionServerHostname: 
"test-hostname",
+                               databricks.OptionHTTPPath:       
"/sql/1.0/warehouses/test",
+                       },
+                       wantErr: true,
+               },
+               {
+                       name: "all required options",
+                       opts: map[string]string{
+                               databricks.OptionServerHostname: 
"test-hostname",
+                               databricks.OptionHTTPPath:       
"/sql/1.0/warehouses/test",
+                               databricks.OptionAccessToken:    "test-token",
+                       },
+                       wantErr: false,
+               },
+       }
+
+       for _, tt := range tests {
+               t.Run(tt.name, func(t *testing.T) {
+                       db, err := driver.NewDatabase(tt.opts)
+                       require.NoError(t, err)
+                       require.NotNil(t, db)
+
+                       // Test connection opening (will fail without real 
credentials, but validates options)
+                       _, err = db.Open(context.Background())
+                       if tt.wantErr {
+                               assert.Error(t, err)
+                       } else {
+                               // Even valid options will fail without real 
credentials, so expect error
+                               assert.Error(t, err)
+                       }

Review Comment:
   Um, doesn't this defeat the point of the test? Is there some way to 
differentiate between the failures?



##########
go/adbc/driver/databricks/statement_test.go:
##########
@@ -0,0 +1,55 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks_test
+
+import (
+       "context"
+       "testing"
+
+       "github.com/apache/arrow-adbc/go/adbc/driver/databricks"
+       "github.com/stretchr/testify/assert"
+)
+
+func TestStatementBasic(t *testing.T) {
+       // This is a basic test to ensure the code compiles
+       // Real tests would require a connection to Databricks
+
+       _ = context.Background()
+
+       // Create a driver and database
+       driver := databricks.NewDriver(nil)
+       db, err := driver.NewDatabase(map[string]string{
+               databricks.OptionServerHostname: "mock-host",
+               databricks.OptionAccessToken:    "mock-token",
+               databricks.OptionHTTPPath:       "mock-path",
+       })
+       assert.NoError(t, err)
+       _ = db // Avoid unused variable
+
+       // Note: We can't test the actual statement implementation without a 
real connection
+       // This test just ensures the public API compiles correctly
+       t.Log("Databricks driver public API is correct")
+}
+
+func TestIPCReaderAdapterCompileTime(t *testing.T) {
+       // Test that ipcReaderAdapter implements array.RecordReader
+       // This ensures our interface definitions are correct
+
+       // This is a compile-time check - if it compiles, the test passes
+       t.Log("IPC reader adapter implements required interfaces")
+}

Review Comment:
   unfinished test?



##########
go/adbc/driver/databricks/statement_test.go:
##########
@@ -0,0 +1,55 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks_test
+
+import (
+       "context"
+       "testing"
+
+       "github.com/apache/arrow-adbc/go/adbc/driver/databricks"
+       "github.com/stretchr/testify/assert"
+)
+
+func TestStatementBasic(t *testing.T) {
+       // This is a basic test to ensure the code compiles
+       // Real tests would require a connection to Databricks
+
+       _ = context.Background()
+
+       // Create a driver and database
+       driver := databricks.NewDriver(nil)
+       db, err := driver.NewDatabase(map[string]string{
+               databricks.OptionServerHostname: "mock-host",
+               databricks.OptionAccessToken:    "mock-token",
+               databricks.OptionHTTPPath:       "mock-path",
+       })
+       assert.NoError(t, err)
+       _ = db // Avoid unused variable

Review Comment:
   maybe `defer CheckedClose(t, db)`?



##########
go/adbc/driver/databricks/driver.go:
##########
@@ -0,0 +1,123 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// Package databricks is an ADBC Driver Implementation for Databricks
+// SQL using databricks-sql-go as the underlying SQL driver.
+//
+// It can be used to register a driver for database/sql by importing
+// github.com/apache/arrow-adbc/go/adbc/sqldriver and running:
+//
+//     sql.Register("databricks", sqldriver.Driver{databricks.Driver{}})
+//
+// You can then open a databricks connection with the database/sql
+// standard package by using:
+//
+//     db, err := sql.Open("databricks", 
"token=<token>&hostname=<hostname>&port=<port>&httpPath=<path>")
+package databricks
+
+import (
+       "context"
+       "runtime/debug"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-adbc/go/adbc/driver/internal/driverbase"
+       "github.com/apache/arrow-go/v18/arrow/memory"
+)
+
+const (
+       // Connection options
+       OptionServerHostname = "adbc.databricks.server_hostname"

Review Comment:
   While it's been the convention so far, and while I suppose we won't type the actual string very often,
   
   I've started to wonder whether we could just make the option key `databricks...` without having to prefix everything with `adbc.`, and save a cycle or two of redundant comparisons.



##########
go/adbc/driver/databricks/statement.go:
##########
@@ -0,0 +1,275 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks
+
+import (
+       "context"
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+       "reflect"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-go/v18/arrow"
+       "github.com/apache/arrow-go/v18/arrow/array"
+
+       dbsqlrows "github.com/databricks/databricks-sql-go/rows"
+)
+
+type statementImpl struct {
+       conn       *connectionImpl
+       query      string
+       parameters []interface{}
+       prepared   *sql.Stmt
+}
+
+func (s *statementImpl) Close() error {
+       if s.prepared != nil {
+               return s.prepared.Close()
+       }
+       return nil
+}
+
+func (s *statementImpl) SetOption(key, val string) error {
+       // No statement-specific options are supported yet
+       return adbc.Error{
+               Code: adbc.StatusNotImplemented,
+               Msg:  fmt.Sprintf("unsupported statement option: %s", key),
+       }
+}
+
+func (s *statementImpl) SetSqlQuery(query string) error {
+       s.query = query
+       // Reset prepared statement if query changes
+       if s.prepared != nil {
+               _ = s.prepared.Close() // Ignore error on cleanup
+               s.prepared = nil
+       }
+       return nil
+}
+
+func (s *statementImpl) Prepare(ctx context.Context) error {
+       if s.query == "" {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       stmt, err := s.conn.db.PrepareContext(ctx, s.query)
+       if err != nil {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  fmt.Sprintf("failed to prepare statement: %v", 
err),
+               }
+       }
+
+       s.prepared = stmt
+       return nil
+}
+
+func (s *statementImpl) ExecuteQuery(ctx context.Context) (array.RecordReader, 
int64, error) {
+       if s.query == "" {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       // Get raw connection to access Arrow batches directly
+       conn, err := s.conn.db.Conn(ctx)
+       if err != nil {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to get raw connection: %v", 
err),
+               }
+       }
+       defer func() { _ = conn.Close() }()
+
+       // Execute query using raw driver interface to get Arrow batches
+       var driverRows driver.Rows
+       err = conn.Raw(func(driverConn interface{}) error {
+               // Use raw driver interface for direct Arrow access
+               if queryerCtx, ok := driverConn.(driver.QueryerContext); ok {
+                       // Convert parameters to driver.NamedValue slice
+                       var driverArgs []driver.NamedValue
+                       for i, param := range s.parameters {
+                               driverArgs = append(driverArgs, 
driver.NamedValue{
+                                       Ordinal: i + 1,
+                                       Value:   param,
+                               })
+                       }
+                       driverRows, err = queryerCtx.QueryContext(ctx, s.query, 
driverArgs)
+                       return err
+               }
+               return fmt.Errorf("driver does not support QueryerContext")
+       })
+
+       if err != nil {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to execute query: %v", err),
+               }
+       }
+       defer func() { _ = driverRows.Close() }()
+
+       // Convert to databricks rows interface to get Arrow batches
+       databricksRows, ok := driverRows.(dbsqlrows.Rows)
+       if !ok {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  "driver rows do not support Arrow batches",
+               }
+       }
+
+       // Use the IPC stream interface (zero-copy)
+       reader, err := newIPCReaderAdapter(ctx, databricksRows)
+       if err != nil {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to create IPC reader adapter: 
%v", err),
+               }
+       }
+
+       // Return -1 for rowsAffected (unknown) since we can't count without 
consuming
+       // The ADBC spec allows -1 to indicate "unknown number of rows affected"
+       return reader, -1, nil
+}
+
+func (s *statementImpl) ExecuteUpdate(ctx context.Context) (int64, error) {
+       var result sql.Result
+       var err error
+
+       if s.prepared != nil {
+               result, err = s.prepared.ExecContext(ctx, s.parameters...)
+       } else if s.query != "" {
+               result, err = s.conn.db.ExecContext(ctx, s.query, 
s.parameters...)
+       } else {
+               return -1, adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       if err != nil {
+               return -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to execute update: %v", err),
+               }
+       }
+
+       rowsAffected, err := result.RowsAffected()
+       if err != nil {
+               return -1, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to get rows affected: %v", 
err),
+               }
+       }
+
+       return rowsAffected, nil
+}
+
+func (s *statementImpl) Bind(ctx context.Context, values arrow.Record) error {
+       // Convert Arrow record to parameters
+       s.parameters = make([]interface{}, values.NumCols())
+
+       for i := 0; i < int(values.NumCols()); i++ {
+               col := values.Column(i)
+               if col.Len() == 0 {
+                       s.parameters[i] = nil
+                       continue
+               }
+
+               // Take the first value from each column
+               value, err := s.arrowToGoValue(col, 0)
+               if err != nil {
+                       return adbc.Error{
+                               Code: adbc.StatusInvalidArgument,
+                               Msg:  fmt.Sprintf("failed to convert parameter 
%d: %v", i, err),
+                       }
+               }
+               s.parameters[i] = value
+       }
+
+       return nil
+}
+
+func (s *statementImpl) BindStream(ctx context.Context, stream 
array.RecordReader) error {
+       // For simplicity, we'll just bind the first record
+       if stream.Next() {
+               return s.Bind(ctx, stream.Record())
+       }
+       return nil
+}

Review Comment:
   That... would be surprising to run into. Usually drivers store the bound stream and convert bound records into streams, not the other way around.



##########
go/adbc/driver/databricks/statement.go:
##########
@@ -0,0 +1,275 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks
+
+import (
+       "context"
+       "database/sql"
+       "database/sql/driver"
+       "fmt"
+       "reflect"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-go/v18/arrow"
+       "github.com/apache/arrow-go/v18/arrow/array"
+
+       dbsqlrows "github.com/databricks/databricks-sql-go/rows"
+)
+
+type statementImpl struct {
+       conn       *connectionImpl
+       query      string
+       parameters []interface{}
+       prepared   *sql.Stmt
+}
+
+func (s *statementImpl) Close() error {
+       if s.prepared != nil {
+               return s.prepared.Close()
+       }
+       return nil
+}
+
+func (s *statementImpl) SetOption(key, val string) error {
+       // No statement-specific options are supported yet
+       return adbc.Error{
+               Code: adbc.StatusNotImplemented,
+               Msg:  fmt.Sprintf("unsupported statement option: %s", key),
+       }
+}
+
+func (s *statementImpl) SetSqlQuery(query string) error {
+       s.query = query
+       // Reset prepared statement if query changes
+       if s.prepared != nil {
+               _ = s.prepared.Close() // Ignore error on cleanup
+               s.prepared = nil
+       }
+       return nil
+}
+
+func (s *statementImpl) Prepare(ctx context.Context) error {
+       if s.query == "" {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       stmt, err := s.conn.db.PrepareContext(ctx, s.query)
+       if err != nil {
+               return adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  fmt.Sprintf("failed to prepare statement: %v", 
err),
+               }
+       }
+
+       s.prepared = stmt
+       return nil
+}
+
+func (s *statementImpl) ExecuteQuery(ctx context.Context) (array.RecordReader, 
int64, error) {
+       if s.query == "" {
+               return nil, -1, adbc.Error{
+                       Code: adbc.StatusInvalidState,
+                       Msg:  "no query set",
+               }
+       }
+
+       // Get raw connection to access Arrow batches directly
+       conn, err := s.conn.db.Conn(ctx)

Review Comment:
   Hmm, are we potentially getting a different connection from the pool each 
time? Wouldn't that surprise users?



##########
go/adbc/driver/databricks/database.go:
##########
@@ -0,0 +1,246 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks
+
+import (
+       "context"
+       "fmt"
+       "strconv"
+       "time"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-adbc/go/adbc/driver/internal/driverbase"
+)
+
+type databaseImpl struct {
+       driverbase.DatabaseImplBase
+
+       // Connection parameters
+       serverHostname string
+       httpPath       string
+       accessToken    string
+       port           string
+       catalog        string
+       schema         string
+
+       // Query options
+       queryTimeout        time.Duration
+       maxRows             int64
+       queryRetryCount     int
+       downloadThreadCount int
+
+       // TLS/SSL options
+       sslMode     string
+       sslRootCert string
+
+       // OAuth options (for future expansion)
+       oauthClientID     string
+       oauthClientSecret string
+       oauthRefreshToken string
+}
+
+func (d *databaseImpl) Open(ctx context.Context) (adbc.Connection, error) {
+       if d.serverHostname == "" {
+               return nil, adbc.Error{
+                       Code: adbc.StatusInvalidArgument,
+                       Msg:  "server hostname is required",
+               }
+       }
+
+       if d.httpPath == "" {
+               return nil, adbc.Error{
+                       Code: adbc.StatusInvalidArgument,
+                       Msg:  "HTTP path is required",
+               }
+       }
+
+       if d.accessToken == "" {
+               return nil, adbc.Error{
+                       Code: adbc.StatusInvalidArgument,
+                       Msg:  "access token is required",
+               }
+       }
+
+       conn := &connectionImpl{
+               ConnectionImplBase:  
driverbase.NewConnectionImplBase(&d.DatabaseImplBase),
+               serverHostname:      d.serverHostname,
+               httpPath:            d.httpPath,
+               accessToken:         d.accessToken,
+               port:                d.port,
+               catalog:             d.catalog,
+               dbSchema:            d.schema,
+               queryTimeout:        d.queryTimeout,
+               maxRows:             d.maxRows,
+               queryRetryCount:     d.queryRetryCount,
+               downloadThreadCount: d.downloadThreadCount,
+               sslMode:             d.sslMode,
+               sslRootCert:         d.sslRootCert,
+               oauthClientID:       d.oauthClientID,
+               oauthClientSecret:   d.oauthClientSecret,
+               oauthRefreshToken:   d.oauthRefreshToken,

Review Comment:
   If we're just going to forward them all, maybe factor into a separate struct 
that can be passed with a `Validate` method or something?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: github-unsubscribe@arrow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

Reply via email to