This is an automated email from the ASF dual-hosted git repository.

curth pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-adbc.git


The following commit(s) were added to refs/heads/main by this push:
     new 7e4dcfa39 fix(csharp/src/Drivers/Apache): correctly handle empty 
response and add Client tests (#2275)
7e4dcfa39 is described below

commit 7e4dcfa39d12027c2361dc1c06d124c7ad3bc9f3
Author: Bruce Irschick <[email protected]>
AuthorDate: Fri Oct 25 10:56:02 2024 -0700

    fix(csharp/src/Drivers/Apache): correctly handle empty response and add 
Client tests (#2275)
    
    * Fixes issue of returning an empty RecordBatch when an empty response
    is returned from the server.
    * Adds test to check for empty result in both the AdbcStatement and
    AdbcDataReader
    * Adds Client tests for the Spark driver.
    closes #2233
---
 .../Drivers/Apache/Hive2/HiveServer2Connection.cs  |   2 +-
 .../src/Drivers/Apache/Hive2/HiveServer2Reader.cs  |  26 +--
 .../src/Drivers/Apache/Impala/ImpalaConnection.cs  |   2 +-
 .../Apache/Spark/SparkDatabricksConnection.cs      |   2 +-
 .../Drivers/Apache/Spark/SparkHttpConnection.cs    |   2 +-
 csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs |  16 +-
 csharp/test/Apache.Arrow.Adbc.Tests/DriverTests.cs |   4 +
 csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs    |   5 +
 .../Apache.Arrow.Adbc.Tests/TestEnvironment.cs     |   2 +
 .../Drivers/Apache/Impala/ImpalaTestEnvironment.cs |   2 +
 .../Apache/Spark/BinaryBooleanValueTests.cs        |   3 +
 csharp/test/Drivers/Apache/Spark/ClientTests.cs    | 225 +++++++++++++++++++++
 csharp/test/Drivers/Apache/Spark/DriverTests.cs    |  18 +-
 .../Drivers/Apache/Spark/SparkTestEnvironment.cs   | 151 +++++++++++++-
 14 files changed, 436 insertions(+), 24 deletions(-)

diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs
index 2853bbe04..c839bbaa7 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs
@@ -98,7 +98,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 
         internal abstract SchemaParser SchemaParser { get; }
 
-        internal abstract IArrowArrayStream NewReader<T>(T statement, Schema 
schema, CancellationToken cancellationToken = default) where T : 
HiveServer2Statement;
+        internal abstract IArrowArrayStream NewReader<T>(T statement, Schema 
schema) where T : HiveServer2Statement;
 
         public override IArrowArrayStream GetObjects(GetObjectsDepth depth, 
string? catalogPattern, string? dbSchemaPattern, string? tableNamePattern, 
IReadOnlyList<string>? tableTypes, string? columnNamePattern)
         {
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs
index 5c07d8483..e1b711aba 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs
@@ -67,8 +67,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
         public HiveServer2Reader(
             HiveServer2Statement statement,
             Schema schema,
-            DataTypeConversion dataTypeConversion,
-            CancellationToken cancellationToken = default)
+            DataTypeConversion dataTypeConversion)
         {
             _statement = statement;
             Schema = schema;
@@ -88,22 +87,20 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
             // Await the fetch response
             TFetchResultsResp response = await FetchNext(_statement, 
cancellationToken);
 
-            // Build the current batch
-            RecordBatch result = CreateBatch(response, out int fetchedRows);
-
-            if ((_statement.BatchSize > 0 && fetchedRows < 
_statement.BatchSize) || fetchedRows == 0)
+            int columnCount = GetColumnCount(response);
+            int rowCount = GetRowCount(response, columnCount);
+            if ((_statement.BatchSize > 0 && rowCount < _statement.BatchSize) 
|| rowCount == 0)
             {
                 // This is the last batch
                 _statement = null;
             }
 
-            // Return the current batch.
-            return result;
+            // Build the current batch, if any data exists
+            return rowCount > 0 ? CreateBatch(response, columnCount, rowCount) 
: null;
         }
 
-        private RecordBatch CreateBatch(TFetchResultsResp response, out int 
length)
+        private RecordBatch CreateBatch(TFetchResultsResp response, int 
columnCount, int rowCount)
         {
-            int columnCount = response.Results.Columns.Count;
             IList<IArrowArray> columnData = [];
             bool shouldConvertScalar = 
_dataTypeConversion.HasFlag(DataTypeConversion.Scalar);
             for (int i = 0; i < columnCount; i++)
@@ -113,10 +110,15 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
                 columnData.Add(columnArray);
             }
 
-            length = columnCount > 0 ? 
GetArray(response.Results.Columns[0]).Length : 0;
-            return new RecordBatch(Schema, columnData, length);
+            return new RecordBatch(Schema, columnData, rowCount);
         }
 
+        private static int GetColumnCount(TFetchResultsResp response) =>
+            response.Results.Columns.Count;
+
+        private static int GetRowCount(TFetchResultsResp response, int 
columnCount) =>
+            columnCount > 0 ? GetArray(response.Results.Columns[0]).Length : 0;
+
         private static async Task<TFetchResultsResp> 
FetchNext(HiveServer2Statement statement, CancellationToken cancellationToken = 
default)
         {
             var request = new TFetchResultsReq(statement.OperationHandle, 
TFetchOrientation.FETCH_NEXT, statement.BatchSize);
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaConnection.cs 
b/csharp/src/Drivers/Apache/Impala/ImpalaConnection.cs
index a700d0f6b..c6c6cc796 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaConnection.cs
+++ b/csharp/src/Drivers/Apache/Impala/ImpalaConnection.cs
@@ -87,6 +87,6 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
 
         internal override SchemaParser SchemaParser { get; } = new 
HiveServer2SchemaParser();
 
-        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema, CancellationToken cancellationToken = default) => new 
HiveServer2Reader(statement, schema, dataTypeConversion: DataTypeConversion, 
cancellationToken);
+        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema) => new HiveServer2Reader(statement, schema, dataTypeConversion: 
DataTypeConversion);
     }
 }
diff --git a/csharp/src/Drivers/Apache/Spark/SparkDatabricksConnection.cs 
b/csharp/src/Drivers/Apache/Spark/SparkDatabricksConnection.cs
index f88d500e3..764027198 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkDatabricksConnection.cs
+++ b/csharp/src/Drivers/Apache/Spark/SparkDatabricksConnection.cs
@@ -30,7 +30,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
         {
         }
 
-        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema, CancellationToken cancellationToken = default) => new 
SparkDatabricksReader(statement, schema);
+        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema) => new SparkDatabricksReader(statement, schema);
 
         internal override SchemaParser SchemaParser => new 
SparkDatabricksSchemaParser();
 
diff --git a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs 
b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
index f67b3316e..9d34ac75c 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
@@ -129,7 +129,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
             }
         }
 
-        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema, CancellationToken cancellationToken = default) => new 
HiveServer2Reader(statement, schema, dataTypeConversion: 
statement.Connection.DataTypeConversion, cancellationToken);
+        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema) => new HiveServer2Reader(statement, schema, dataTypeConversion: 
statement.Connection.DataTypeConversion);
 
         protected override Task<TTransport> CreateTransportAsync()
         {
diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs 
b/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs
index d5a3b117c..d35ebdd10 100644
--- a/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs
+++ b/csharp/test/Apache.Arrow.Adbc.Tests/ClientTests.cs
@@ -72,20 +72,22 @@ namespace Apache.Arrow.Adbc.Tests
         /// </summary>
         /// <param name="adbcConnection">The <see 
cref="Adbc.Client.AdbcConnection"/> to use.</param>
         /// <param name="testConfiguration">The <see 
cref="TestConfiguration"/> to use</param>
-        public static void CanClientGetSchema(Adbc.Client.AdbcConnection 
adbcConnection, TestConfiguration testConfiguration)
+        /// <param name="customQuery">The custom query to use instead of query 
from <see cref="TestConfiguration.Query" /></param>"/>
+        /// <param name="expectedColumnCount">The custom column count to use 
instead of query from <see cref="TestMetadata.ExpectedColumnCount" /></param>
+        public static void CanClientGetSchema(Adbc.Client.AdbcConnection 
adbcConnection, TestConfiguration testConfiguration, string? customQuery = 
default, int? expectedColumnCount = default)
         {
             if (adbcConnection == null) throw new 
ArgumentNullException(nameof(adbcConnection));
             if (testConfiguration == null) throw new 
ArgumentNullException(nameof(testConfiguration));
 
             adbcConnection.Open();
 
-            using AdbcCommand adbcCommand = new 
AdbcCommand(testConfiguration.Query, adbcConnection);
+            using AdbcCommand adbcCommand = new AdbcCommand(customQuery ?? 
testConfiguration.Query, adbcConnection);
             using AdbcDataReader reader = 
adbcCommand.ExecuteReader(CommandBehavior.SchemaOnly);
 
             DataTable? table = reader.GetSchemaTable();
 
             // there is one row per field
-            Assert.Equal(testConfiguration.Metadata.ExpectedColumnCount, 
table?.Rows.Count);
+            Assert.Equal(expectedColumnCount ?? 
testConfiguration.Metadata.ExpectedColumnCount, table?.Rows.Count);
         }
 
         /// <summary>
@@ -98,7 +100,9 @@ namespace Apache.Arrow.Adbc.Tests
         public static void CanClientExecuteQuery(
             Adbc.Client.AdbcConnection adbcConnection,
             TestConfiguration testConfiguration,
-            Action<AdbcCommand>? additionalCommandOptionsSetter = null)
+            Action<AdbcCommand>? additionalCommandOptionsSetter = null,
+            string? customQuery = default,
+            int? expectedResultsCount = default)
         {
             if (adbcConnection == null) throw new 
ArgumentNullException(nameof(adbcConnection));
             if (testConfiguration == null) throw new 
ArgumentNullException(nameof(testConfiguration));
@@ -107,7 +111,7 @@ namespace Apache.Arrow.Adbc.Tests
 
             adbcConnection.Open();
 
-            using AdbcCommand adbcCommand = new 
AdbcCommand(testConfiguration.Query, adbcConnection);
+            using AdbcCommand adbcCommand = new AdbcCommand(customQuery ?? 
testConfiguration.Query, adbcConnection);
             additionalCommandOptionsSetter?.Invoke(adbcCommand);
             using AdbcDataReader reader = adbcCommand.ExecuteReader();
 
@@ -131,7 +135,7 @@ namespace Apache.Arrow.Adbc.Tests
             }
             finally { reader.Close(); }
 
-            Assert.Equal(testConfiguration.ExpectedResultsCount, count);
+            Assert.Equal(expectedResultsCount ?? 
testConfiguration.ExpectedResultsCount, count);
         }
 
         /// <summary>
diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/DriverTests.cs 
b/csharp/test/Apache.Arrow.Adbc.Tests/DriverTests.cs
index b6205aea1..88b7dcca7 100644
--- a/csharp/test/Apache.Arrow.Adbc.Tests/DriverTests.cs
+++ b/csharp/test/Apache.Arrow.Adbc.Tests/DriverTests.cs
@@ -74,6 +74,10 @@ namespace Apache.Arrow.Adbc.Tests
             while (queryResult.Stream != null)
             {
                 RecordBatch nextBatch = await 
queryResult.Stream.ReadNextRecordBatchAsync();
+                if (expectedNumberOfResults == 0)
+                {
+                    Assert.Null(nextBatch);
+                }
                 if (nextBatch == null) { break; }
                 count += nextBatch.Length;
             }
diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs 
b/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs
index d4662c504..46f52584a 100644
--- a/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs
+++ b/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs
@@ -170,6 +170,11 @@ namespace Apache.Arrow.Adbc.Tests
             return queries;
         }
 
+        protected SampleDataBuilder GetSampleDataBuilder()
+        {
+            return TestEnvironment.GetSampleDataBuilder();
+        }
+
         /// <summary>
         /// Gets a the Spark ADBC driver with settings from the <see 
cref="SparkTestConfiguration"/>.
         /// </summary>
diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs 
b/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs
index ad805dee2..374c102f3 100644
--- a/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs
+++ b/csharp/test/Apache.Arrow.Adbc.Tests/TestEnvironment.cs
@@ -53,6 +53,8 @@ namespace Apache.Arrow.Adbc.Tests
 
         public abstract AdbcDriver CreateNewDriver();
 
+        public abstract SampleDataBuilder GetSampleDataBuilder();
+
         public abstract Dictionary<string, string> GetDriverParameters(TConfig 
testConfiguration);
 
         public virtual string GetCreateTemporaryTableStatement(string 
tableName, string columns)
diff --git a/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs 
b/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs
index 57efaf380..a81444cb1 100644
--- a/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs
@@ -73,5 +73,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Impala
 
         public override string GetInsertStatement(string tableName, string 
columnName, string? value) =>
             string.Format("INSERT INTO {0} ({1}) SELECT {2};", tableName, 
columnName, value ?? "NULL");
+
+        public override SampleDataBuilder GetSampleDataBuilder() => throw new 
NotImplementedException();
     }
 }
diff --git a/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs 
b/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs
index 5d2c5f2bf..403c4ac01 100644
--- a/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs
+++ b/csharp/test/Drivers/Apache/Spark/BinaryBooleanValueTests.cs
@@ -98,6 +98,9 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
         [InlineData("CAST(NULL AS CHAR(10))")]
         [InlineData("CAST(NULL AS BOOLEAN)")]
         [InlineData("CAST(NULL AS BINARY)")]
+        [InlineData("CAST(NULL AS MAP<STRING, INT>)")]
+        [InlineData("CAST(NULL AS STRUCT<NAME: STRING>)")]
+        [InlineData("CAST(NULL AS ARRAY<INT>)")]
         public async Task TestNullData(string projectionClause)
         {
             string selectStatement = $"SELECT {projectionClause};";
diff --git a/csharp/test/Drivers/Apache/Spark/ClientTests.cs 
b/csharp/test/Drivers/Apache/Spark/ClientTests.cs
new file mode 100644
index 000000000..28c80d8f0
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Spark/ClientTests.cs
@@ -0,0 +1,225 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Apache.Arrow.Adbc.Tests.Xunit;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
+{
+    /// <summary>
+    /// Class for testing the ADBC Client using the Spark ADBC driver.
+    /// </summary>
+    /// <remarks>
+    /// Tests are ordered to ensure data is created for the other
+    /// queries to run.
+    /// </remarks>
+    [TestCaseOrderer("Apache.Arrow.Adbc.Tests.Xunit.TestOrderer", 
"Apache.Arrow.Adbc.Tests")]
+    public class ClientTests : TestBase<SparkTestConfiguration, 
SparkTestEnvironment>
+    {
+        public ClientTests(ITestOutputHelper? outputHelper) : 
base(outputHelper, new SparkTestEnvironment.Factory())
+        {
+            Skip.IfNot(Utils.CanExecuteTestConfig(TestConfigVariable));
+        }
+
+        /// <summary>
+        /// Validates if the client execute updates.
+        /// </summary>
+        [SkippableFact, Order(1)]
+        public void CanClientExecuteUpdate()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = 
GetAdbcConnection())
+            {
+                adbcConnection.Open();
+
+                string[] queries = GetQueries();
+                int affectedRows = ValidateAffectedRows ? 1 : -1;
+
+                List<int> expectedResults = TestEnvironment.ServerType != 
SparkServerType.Databricks
+                    ? [
+                        -1, // DROP   TABLE
+                        -1, // CREATE TABLE
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        //1,  // UPDATE
+                        //1,  // DELETE
+                    ]
+                    : [
+                        -1, // DROP   TABLE
+                        -1, // CREATE TABLE
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // UPDATE
+                        affectedRows,  // DELETE
+                    ];
+
+                Tests.ClientTests.CanClientExecuteUpdate(adbcConnection, 
TestConfiguration, queries, expectedResults);
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client can get the schema.
+        /// </summary>
+        [SkippableFact, Order(2)]
+        public void CanClientGetSchema()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = 
GetAdbcConnection())
+            {
+                Tests.ClientTests.CanClientGetSchema(adbcConnection, 
TestConfiguration, $"SELECT * FROM {TestConfiguration.Metadata.Table}");
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client can connect to a live server and
+        /// parse the results.
+        /// </summary>
+        [SkippableFact, Order(3)]
+        public void CanClientExecuteQuery()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = 
GetAdbcConnection())
+            {
+                Tests.ClientTests.CanClientExecuteQuery(adbcConnection, 
TestConfiguration);
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client can connect to a live server and
+        /// parse the results.
+        /// </summary>
+        [SkippableFact, Order(5)]
+        public void CanClientExecuteEmptyQuery()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = 
GetAdbcConnection())
+            {
+                Tests.ClientTests.CanClientExecuteQuery(
+                    adbcConnection,
+                    TestConfiguration,
+                    customQuery: $"SELECT * FROM 
{TestConfiguration.Metadata.Table} WHERE FALSE",
+                    expectedResultsCount: 0);
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client is retrieving and converting values
+        /// to the expected types.
+        /// </summary>
+        [SkippableFact, Order(4)]
+        public void VerifyTypesAndValues()
+        {
+            using (Adbc.Client.AdbcConnection dbConnection = 
GetAdbcConnection())
+            {
+                SampleDataBuilder sampleDataBuilder = GetSampleDataBuilder();
+
+                Tests.ClientTests.VerifyTypesAndValues(dbConnection, 
sampleDataBuilder);
+            }
+        }
+
+        [SkippableFact]
+        public void VerifySchemaTablesWithNoConstraints()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = 
GetAdbcConnection(includeTableConstraints: false))
+            {
+                adbcConnection.Open();
+
+                string schema = "Tables";
+
+                var tables = adbcConnection.GetSchema(schema);
+
+                Assert.True(tables.Rows.Count > 0, $"No tables were found in 
the schema '{schema}'");
+            }
+        }
+
+        [SkippableFact]
+        public void VerifySchemaTables()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = 
GetAdbcConnection())
+            {
+                adbcConnection.Open();
+
+                var collections = 
adbcConnection.GetSchema("MetaDataCollections");
+                Assert.Equal(7, collections.Rows.Count);
+                Assert.Equal(2, collections.Columns.Count);
+
+                var restrictions = adbcConnection.GetSchema("Restrictions");
+                Assert.Equal(11, restrictions.Rows.Count);
+                Assert.Equal(3, restrictions.Columns.Count);
+
+                var catalogs = adbcConnection.GetSchema("Catalogs");
+                Assert.Single(catalogs.Columns);
+                var catalog = (string?)catalogs.Rows[0].ItemArray[0];
+
+                catalogs = adbcConnection.GetSchema("Catalogs", new[] { 
catalog });
+                Assert.Equal(1, catalogs.Rows.Count);
+
+                string random = "X" + Guid.NewGuid().ToString("N");
+
+                catalogs = adbcConnection.GetSchema("Catalogs", new[] { random 
});
+                Assert.Equal(0, catalogs.Rows.Count);
+
+                var schemas = adbcConnection.GetSchema("Schemas", new[] { 
catalog });
+                Assert.Equal(2, schemas.Columns.Count);
+                var schema = (string?)schemas.Rows[0].ItemArray[1];
+
+                schemas = adbcConnection.GetSchema("Schemas", new[] { catalog, 
schema });
+                Assert.Equal(1, schemas.Rows.Count);
+
+                schemas = adbcConnection.GetSchema("Schemas", new[] { random 
});
+                Assert.Equal(0, schemas.Rows.Count);
+
+                schemas = adbcConnection.GetSchema("Schemas", new[] { catalog, 
random });
+                Assert.Equal(0, schemas.Rows.Count);
+
+                schemas = adbcConnection.GetSchema("Schemas", new[] { random, 
random });
+                Assert.Equal(0, schemas.Rows.Count);
+
+                var tableTypes = adbcConnection.GetSchema("TableTypes");
+                Assert.Single(tableTypes.Columns);
+
+                var tables = adbcConnection.GetSchema("Tables", new[] { 
catalog, schema });
+                Assert.Equal(4, tables.Columns.Count);
+
+                tables = adbcConnection.GetSchema("Tables", new[] { catalog, 
random });
+                Assert.Equal(0, tables.Rows.Count);
+
+                tables = adbcConnection.GetSchema("Tables", new[] { random, 
schema });
+                Assert.Equal(0, tables.Rows.Count);
+
+                tables = adbcConnection.GetSchema("Tables", new[] { random, 
random });
+                Assert.Equal(0, tables.Rows.Count);
+
+                tables = adbcConnection.GetSchema("Tables", new[] { catalog, 
schema, random });
+                Assert.Equal(0, tables.Rows.Count);
+
+                var columns = adbcConnection.GetSchema("Columns", new[] { 
catalog, schema });
+                Assert.Equal(16, columns.Columns.Count);
+            }
+        }
+
+        private Adbc.Client.AdbcConnection GetAdbcConnection(bool 
includeTableConstraints = true)
+        {
+            return new Adbc.Client.AdbcConnection(
+                NewDriver, GetDriverParameters(TestConfiguration),
+                []
+            );
+        }
+    }
+}
diff --git a/csharp/test/Drivers/Apache/Spark/DriverTests.cs 
b/csharp/test/Drivers/Apache/Spark/DriverTests.cs
index 19a787455..880f74888 100644
--- a/csharp/test/Drivers/Apache/Spark/DriverTests.cs
+++ b/csharp/test/Drivers/Apache/Spark/DriverTests.cs
@@ -31,7 +31,7 @@ using ColumnTypeId = 
Apache.Arrow.Adbc.Drivers.Apache.Spark.SparkConnection.Colu
 namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
 {
     /// <summary>
-    /// Class for testing the Snowflake ADBC driver connection tests.
+    /// Class for testing the Spark ADBC driver connection tests.
     /// </summary>
     /// <remarks>
     /// Tests are ordered to ensure data is created for the other
@@ -630,6 +630,22 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
             OutputHelper?.WriteLine(exception.Message);
         }
 
+        /// <summary>
+        /// Validates if the driver can connect to a live server and
+        /// parse the results using the asynchronous methods.
+        /// </summary>
+        [SkippableFact, Order(15)]
+        public async Task CanExecuteQueryAsyncEmptyResult()
+        {
+            using AdbcConnection adbcConnection = NewConnection();
+            using AdbcStatement statement = adbcConnection.CreateStatement();
+
+            statement.SqlQuery = $"SELECT * from 
{TestConfiguration.Metadata.Table} WHERE FALSE";
+            QueryResult queryResult = await statement.ExecuteQueryAsync();
+
+            await Tests.DriverTests.CanExecuteQueryAsync(queryResult, 0);
+        }
+
         public static IEnumerable<object[]> CatalogNamePatternData()
         {
             string? catalogName = new 
DriverTests(null).TestConfiguration?.Metadata?.Catalog;
diff --git a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs 
b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
index 1b79facf4..247d6d00b 100644
--- a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
@@ -17,8 +17,11 @@
 
 using System;
 using System.Collections.Generic;
+using System.Data.SqlTypes;
+using System.Text;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
 using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Apache.Arrow.Types;
 
 namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
 {
@@ -26,7 +29,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
     {
         public class Factory : Factory<SparkTestEnvironment>
         {
-            public override SparkTestEnvironment Create(Func<AdbcConnection> 
getConnection) => new SparkTestEnvironment(getConnection);
+            public override SparkTestEnvironment Create(Func<AdbcConnection> 
getConnection) => new(getConnection);
         }
 
         private SparkTestEnvironment(Func<AdbcConnection> getConnection) : 
base(getConnection) { }
@@ -132,5 +135,151 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
 
         public override string GetInsertStatement(string tableName, string 
columnName, string? value) =>
             string.Format("INSERT INTO {0} ({1}) SELECT {2};", tableName, 
columnName, value ?? "NULL");
+
+        /// <summary>
+        /// Builds the Spark-specific sample data set used by the Client tests:
+        /// one row of standard typed values, one row of NULL / null-containing
+        /// complex values, and one deeply nested struct. Each sample pairs a
+        /// SELECT query with the column name, .NET type, Arrow type, and value
+        /// the driver is expected to return.
+        /// </summary>
+        public override SampleDataBuilder GetSampleDataBuilder()
+        {
+            SampleDataBuilder sampleDataBuilder = new();
+
+            // standard values
+            sampleDataBuilder.Samples.Add(
+                new SampleData()
+                {
+                    Query = "SELECT " +
+                            "CAST(1 as BIGINT) as id, " +
+                            "CAST(2 as INTEGER) as int, " +
+                            "CAST(1.23 as FLOAT) as number_float, " +
+                            "CAST(4.56 as DOUBLE) as number_double, " +
+                            "4.56BD as decimal, " +
+                            "9.9999999999999999999999999999999999999BD as 
big_decimal, " +
+                            "CAST(True as BOOLEAN) as is_active, " +
+                            "'John Doe' as name, " +
+                            "X'616263313233' as data, " +
+                            "DATE '2023-09-08' as date, " +
+                            "TIMESTAMP '2023-09-08 12:34:56+00:00' as 
timestamp, " +
+                            "INTERVAL 178956969 YEAR 11 MONTH as interval, " +
+                            "ARRAY(1, 2, 3) as numbers, " +
+                            "STRUCT('John Doe' as name, 30 as age) as person," 
+
+                            "MAP('name', CAST('Jane Doe' AS STRING), 'age', 
CAST(29 AS INT)) as map",
+                    ExpectedValues =
+                    [
+                        new("id", typeof(long), typeof(Int64Type), 1L),
+                        new("int", typeof(int), typeof(Int32Type), 2),
+                        new("number_float", typeof(double), 
typeof(DoubleType), 1.23d),
+                        new("number_double", typeof(double), 
typeof(DoubleType), 4.56d),
+                        new("decimal", typeof(SqlDecimal), 
typeof(Decimal128Type), SqlDecimal.Parse("4.56")),
+                        new("big_decimal", typeof(SqlDecimal), 
typeof(Decimal128Type), 
SqlDecimal.Parse("9.9999999999999999999999999999999999999")),
+                        new("is_active", typeof(bool), typeof(BooleanType), 
true),
+                        new("name", typeof(string), typeof(StringType), "John 
Doe"),
+                        new("data", typeof(byte[]), typeof(BinaryType), 
UTF8Encoding.UTF8.GetBytes("abc123")),
+                        new("date", typeof(DateTime), typeof(Date32Type), new 
DateTime(2023, 9, 8)),
+                        new("timestamp", typeof(DateTimeOffset), 
typeof(TimestampType), new DateTimeOffset(new DateTime(2023, 9, 8, 12, 34, 56), 
TimeSpan.Zero)),
+                        new("interval", typeof(string), typeof(StringType), 
"178956969-11"),
+                        new("numbers", typeof(string), typeof(StringType), 
"[1,2,3]"),
+                        new("person", typeof(string), typeof(StringType), 
"""{"name":"John Doe","age":30}"""),
+                        new("map", typeof(string), typeof(StringType), 
"""{"age":"29","name":"Jane Doe"}""") // This is unexpected JSON. Expecting 29 
to be a numeric and not string.
+                    ]
+                });
+
+            // NULL scalars plus null-containing MAP/ARRAY/STRUCT values; the
+            // complex types are expected back as their JSON string rendering.
+            sampleDataBuilder.Samples.Add(
+                new SampleData()
+                {
+                    Query = "SELECT  " +
+                            "CAST(NULL as BIGINT) as id, " +
+                            "CAST(NULL as INTEGER) as int, " +
+                            "CAST(NULL as FLOAT) as number_float, " +
+                            "CAST(NULL as DOUBLE) as number_double, " +
+                            "CAST(NULL as DECIMAL(38,2)) as decimal,  " +
+                            "CAST(NULL as BOOLEAN) as is_active, " +
+                            "CAST(NULL as STRING)  as name, " +
+                            "CAST(NULL as BINARY) as data, " +
+                            "CAST(NULL as DATE) as date, " +
+                            "CAST(NULL as TIMESTAMP) as timestamp," +
+                            "CAST(NULL as MAP<STRING, INTEGER>) as map, " +
+                            "CAST(NULL as ARRAY<INTEGER>) as numbers, " +
+                            "CAST(NULL as STRUCT<field: STRING>) as person, " +
+                            "MAP(CAST('EMPTY' as STRING), CAST(NULL as 
INTEGER)) as map_null, " +
+                            "ARRAY(NULL,NULL,NULL) as numbers_null, " +
+                            "STRUCT(CAST(NULL as STRING), CAST(NULL as 
INTEGER)) as person_null",
+                            //"CAST(NULL as STRUCT<field: STRING>) as struct, 
" +
+                            //"STRUCT(CAST(NULL as STRING) as name, CAST(NULL 
as BIGINT) as age) as person",
+                    ExpectedValues =
+                    [
+                        new("id", typeof(long), typeof(Int64Type), null),
+                        new("int", typeof(int), typeof(Int32Type), null),
+                        new("number_float", typeof(double), 
typeof(DoubleType), null),
+                        new("number_double", typeof(double), 
typeof(DoubleType), null),
+                        new("decimal", typeof(SqlDecimal), 
typeof(Decimal128Type), null),
+                        new("is_active", typeof(bool), typeof(BooleanType), 
null),
+                        new("name", typeof(string), typeof(StringType), null),
+                        new("data", typeof(byte[]), typeof(BinaryType), null),
+                        new("date", typeof(DateTime), typeof(Date32Type), 
null),
+                        new("timestamp", typeof(DateTimeOffset), 
typeof(TimestampType), null),
+                        new("map", typeof(string), typeof(StringType), null),
+                        new("numbers", typeof(string), typeof(StringType), 
null),
+                        new("person", typeof(string), typeof(StringType), 
null),
+                        new("map_null", typeof(string), typeof(StringType), 
"""{"EMPTY":null}"""),
+                        new("numbers_null", typeof(string), 
typeof(StringType), """[null,null,null]"""),
+                        new("person_null", typeof(string), typeof(StringType), 
"""{"col1":null,"col2":null}"""),
+                    ]
+                });
+
+            // complex struct
+            sampleDataBuilder.Samples.Add(
+            new SampleData()
+            {
+                Query = "SELECT " +
+                         "STRUCT(" +
+                         "\"Iron Man\" as name," +
+                         "\"Avengers\" as team," +
+                         "ARRAY(\"Genius\", \"Billionaire\", \"Playboy\", 
\"Philanthropist\") as powers," +
+                         "ARRAY(" +
+                         "  STRUCT(" +
+                         "    \"Captain America\" as name, " +
+                         "    \"Avengers\" as team, " +
+                         "    ARRAY(\"Super Soldier Serum\", \"Vibranium 
Shield\") as powers, " +
+                         "    ARRAY(" +
+                         "     STRUCT(" +
+                         "       \"Thanos\" as name, " +
+                         "       \"Black Order\" as team, " +
+                         "       ARRAY(\"Infinity Gauntlet\", \"Super 
Strength\", \"Teleportation\") as powers, " +
+                         "       ARRAY(" +
+                         "         STRUCT(" +
+                         "           \"Loki\" as name, " +
+                         "           \"Asgard\" as team, " +
+                         "           ARRAY(\"Magic\", \"Shapeshifting\", 
\"Trickery\") as powers " +
+                         "          )" +
+                         "        ) as allies" +
+                         "      )" +
+                         "    ) as enemies" +
+                         " )," +
+                         " STRUCT(" +
+                         "    \"Spider-Man\" as name, " +
+                         "    \"Avengers\" as team, " +
+                         "    ARRAY(\"Spider-Sense\", \"Web-Shooting\", 
\"Wall-Crawling\") as powers, " +
+                         "    ARRAY(" +
+                         "      STRUCT(" +
+                         "        \"Green Goblin\" as name, " +
+                         "        \"Sinister Six\" as team, " +
+                         "        ARRAY(\"Glider\", \"Pumpkin Bombs\", \"Super 
Strength\") as powers, " +
+                         "         ARRAY(" +
+                         "          STRUCT(" +
+                         "            \"Doctor Octopus\" as name, " +
+                         "            \"Sinister Six\" as team, " +
+                         "            ARRAY(\"Mechanical Arms\", \"Genius\", 
\"Madness\") as powers " +
+                         "          )" +
+                         "        ) as allies" +
+                         "      )" +
+                         "    ) as enemies" +
+                         "  )" +
+                         " ) as friends" +
+                         ") as iron_man",
+                ExpectedValues =
+                   [
+                        // NOTE(review): the expected JSON literal below is
+                        // truncated in this view ("[...]"); confirm the full
+                        // literal against the committed file in the repository.
+                        new("iron_man", typeof(string), typeof(StringType), 
"{\"name\":\"Iron 
Man\",\"team\":\"Avengers\",\"powers\":[\"Genius\",\"Billionaire\",\"Playboy\",\"Philanthropist\"],\"friends\":[{\"name\":\"Captain
 America\",\"team\":\"Avengers\",\"powers\":[\"Super Soldier 
Serum\",\"Vibranium 
Shield\"],\"enemies\":[{\"name\":\"Thanos\",\"team\":\"Black 
Order\",\"powers\":[\"Infinity Gauntlet\",\"Super 
Strength\",\"Teleportation\"],\"allies\":[{\"name\":\"Loki\",\"team\":\"Asg 
[...]
+                   ]
+            });
+
+            return sampleDataBuilder;
+        }
     }
 }


Reply via email to