This is an automated email from the ASF dual-hosted git repository.

curth pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-adbc.git


The following commit(s) were added to refs/heads/main by this push:
     new b0a7b68c2 feat(csharp/src/Drivers/Apache): Add support for Hive ADBC 
Driver with unit tests (#2540)
b0a7b68c2 is described below

commit b0a7b68c222b069c3a36792c8b3408a2b47bab92
Author: amangoyal <[email protected]>
AuthorDate: Tue Mar 4 01:46:37 2025 +0530

    feat(csharp/src/Drivers/Apache): Add support for Hive ADBC Driver with unit 
tests (#2540)
    
    1. Added support for the Hive ADBC driver with HTTP transport protocol.
    2. Added new unit tests for Hive ADBC driver support.
    
    ---------
    
    Co-authored-by: Aman Goyal <[email protected]>
    Co-authored-by: Bruce Irschick <[email protected]>
---
 .../Drivers/Apache/Hive2/HiveServer2AuthType.cs    |  54 ++++++
 .../Drivers/Apache/Hive2/HiveServer2Connection.cs  |  19 +-
 .../Apache/Hive2/HiveServer2ConnectionFactory.cs   |  39 ++++
 .../Drivers/Apache/Hive2/HiveServer2Database.cs    |  46 +++++
 .../HiveServer2Driver.cs}                          |  19 +-
 .../HiveServer2HttpConnection.cs}                  | 212 +++++++++++++++------
 .../Drivers/Apache/Hive2/HiveServer2Parameters.cs  |  24 +++
 .../src/Drivers/Apache/Hive2/HiveServer2Reader.cs  |   8 +-
 .../Drivers/Apache/Hive2/HiveServer2Statement.cs   |   5 +-
 .../Apache/Hive2/HiveServer2TransportType.cs       |  48 +++++
 csharp/src/Drivers/Apache/Hive2/README.md          |  98 ++++++++++
 .../src/Drivers/Apache/Hive2/SqlTypeNameParser.cs  |   7 +-
 .../Apache/Impala/ImpalaConnectionFactory.cs       |  11 +-
 .../Drivers/Apache/Impala/ImpalaHttpConnection.cs  |   6 +-
 .../Apache/Impala/ImpalaStandardConnection.cs      |   2 +
 .../src/Drivers/Apache/Impala/ImpalaStatement.cs   |   1 -
 csharp/src/Drivers/Apache/Spark/README.md          |   2 +-
 .../Drivers/Apache/Spark/SparkConnectionFactory.cs |  13 +-
 .../Drivers/Apache/Spark/SparkHttpConnection.cs    |   6 +-
 csharp/src/Drivers/Apache/Spark/SparkStatement.cs  |   1 -
 csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs    |   5 +-
 .../Apache.Arrow.Adbc.Tests.Drivers.Apache.csproj  |   6 +
 .../Apache/Common/BinaryBooleanValueTests.cs       |   5 +-
 csharp/test/Drivers/Apache/Common/ClientTests.cs   |   3 +-
 .../CommonTestEnvironment.cs}                      |   6 +-
 .../Apache/Common/ComplexTypesValueTests.cs        |   3 +-
 .../Drivers/Apache/Common/DateTimeValueTests.cs    |   3 +-
 csharp/test/Drivers/Apache/Common/DriverTests.cs   |   5 +-
 .../Drivers/Apache/Common/NumericValueTests.cs     |  89 +++++----
 .../test/Drivers/Apache/Common/StatementTests.cs   |   3 +-
 .../test/Drivers/Apache/Common/StringValueTests.cs |   4 +-
 .../Apache/Hive2/BinaryBooleanValueTests.cs        |  66 +++++++
 csharp/test/Drivers/Apache/Hive2/ClientTests.cs    |  48 +++++
 .../Drivers/Apache/Hive2/DateTimeValueTests.cs}    |  26 +--
 csharp/test/Drivers/Apache/Hive2/DriverTests.cs    | 152 +++++++++++++++
 .../Apache/Hive2/HiveServer2TestEnvironment.cs     | 202 +++++++++++++++++++-
 .../test/Drivers/Apache/Hive2/NumericValueTests.cs |  63 ++++++
 .../Drivers/Apache/Hive2/Resources/HiveData.sql    | 105 ++++++++++
 .../Apache/Hive2/Resources/hiveconfig-http.json    |  15 ++
 .../Drivers/Apache/Hive2/StatementTests.cs}        |  21 +-
 .../test/Drivers/Apache/Hive2/StringValueTests.cs  |  43 +++++
 .../Drivers/Apache/Impala/ImpalaTestEnvironment.cs |   4 +-
 .../Drivers/Apache/Spark/SparkConnectionTest.cs    |   9 +-
 .../Drivers/Apache/Spark/SparkTestEnvironment.cs   |   4 +-
 44 files changed, 1304 insertions(+), 207 deletions(-)

diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2AuthType.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2AuthType.cs
new file mode 100644
index 000000000..7a8f62005
--- /dev/null
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2AuthType.cs
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
+{
+    internal enum HiveServer2AuthType
+    {
+        Invalid = 0,
+        None,
+        UsernameOnly,
+        Basic,
+        Empty = int.MaxValue,
+    }
+
+    internal static class HiveServer2AuthTypeParser
+    {
+        internal static bool TryParse(string? authType, out 
HiveServer2AuthType authTypeValue)
+        {
+            switch (authType?.Trim().ToLowerInvariant())
+            {
+                case null:
+                case "":
+                    authTypeValue = HiveServer2AuthType.Empty;
+                    return true;
+                case HiveServer2AuthTypeConstants.None:
+                    authTypeValue = HiveServer2AuthType.None;
+                    return true;
+                case HiveServer2AuthTypeConstants.UsernameOnly:
+                    authTypeValue = HiveServer2AuthType.UsernameOnly;
+                    return true;
+                case HiveServer2AuthTypeConstants.Basic:
+                    authTypeValue = HiveServer2AuthType.Basic;
+                    return true;
+                default:
+                    authTypeValue = HiveServer2AuthType.Invalid;
+                    return false;
+            }
+        }
+    }
+}
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs
index c8c9fac46..7b09fb68a 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Connection.cs
@@ -609,8 +609,10 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
                 TRowSet rowSet = GetRowSetAsync(resp, 
cancellationToken).Result;
                 StringArray tableTypes = rowSet.Columns[0].StringVal.Values;
 
+                HashSet<string> distinctTableTypes = new 
HashSet<string>(tableTypes);
+
                 StringArray.Builder tableTypesBuilder = new 
StringArray.Builder();
-                tableTypesBuilder.AppendRange(tableTypes);
+                tableTypesBuilder.AppendRange(distinctTableTypes);
 
                 IArrowArray[] dataArrays = new IArrowArray[]
                 {
@@ -722,11 +724,19 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
             return fileVersionInfo.ProductVersion ?? 
GetProductVersionDefault();
         }
 
-        protected static Uri GetBaseAddress(string? uri, string? hostName, 
string? path, string? port)
+        protected static Uri GetBaseAddress(string? uri, string? hostName, 
string? path, string? port, string hostOptionName)
         {
             // Uri property takes precedent.
             if (!string.IsNullOrWhiteSpace(uri))
             {
+                if (!string.IsNullOrWhiteSpace(hostName))
+                {
+                    throw new ArgumentOutOfRangeException(
+                        AdbcOptions.Uri,
+                        hostOptionName,
+                        $"Conflicting server arguments. Please provide only 
one of the following options: '{Adbc.AdbcOptions.Uri}' or '{hostOptionName}'.");
+                }
+
                 var uriValue = new Uri(uri);
                 if (uriValue.Scheme != Uri.UriSchemeHttp && uriValue.Scheme != 
Uri.UriSchemeHttps)
                     throw new ArgumentOutOfRangeException(
@@ -752,11 +762,12 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
             return baseAddress;
         }
 
-        // Note data source's Position may be one-indexed or zero-indexed
         protected IReadOnlyDictionary<string, int> 
GetColumnIndexMap(List<TColumnDesc> columns) => columns
-           .Select(t => new { Index = t.Position - PositionRequiredOffset, 
t.ColumnName })
+           .Select(t => new { Index = t.Position - ColumnMapIndexOffset, 
t.ColumnName })
            .ToDictionary(t => t.ColumnName, t => t.Index);
 
+        protected abstract int ColumnMapIndexOffset { get; }
+
         protected abstract Task<TRowSet> GetRowSetAsync(TGetTableTypesResp 
response, CancellationToken cancellationToken = default);
         protected abstract Task<TRowSet> GetRowSetAsync(TGetColumnsResp 
response, CancellationToken cancellationToken = default);
         protected abstract Task<TRowSet> GetRowSetAsync(TGetTablesResp 
response, CancellationToken cancellationToken = default);
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2ConnectionFactory.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2ConnectionFactory.cs
new file mode 100644
index 000000000..1666ff264
--- /dev/null
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2ConnectionFactory.cs
@@ -0,0 +1,39 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+
+namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
+{
+    internal class HiveServer2ConnectionFactory
+    {
+        public static HiveServer2Connection 
NewConnection(IReadOnlyDictionary<string, string> properties)
+        {
+            if (!properties.TryGetValue(HiveServer2Parameters.TransportType, 
out string? type))
+            {
+
+                throw new ArgumentException($"Required property 
'{HiveServer2Parameters.TransportType}' is missing. Supported types: 
{HiveServer2TransportTypeParser.SupportedList}", nameof(properties));
+            }
+            if (!HiveServer2TransportTypeParser.TryParse(type, out 
HiveServer2TransportType typeValue))
+            {
+                throw new ArgumentOutOfRangeException(nameof(properties), 
$"Unsupported or unknown value '{type}' given for property 
'{HiveServer2Parameters.TransportType}'. Supported types: 
{HiveServer2TransportTypeParser.SupportedList}");
+            }
+            return new HiveServer2HttpConnection(properties);
+        }
+    }
+}
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Database.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Database.cs
new file mode 100644
index 000000000..367ea7065
--- /dev/null
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Database.cs
@@ -0,0 +1,46 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
+{
+    internal class HiveServer2Database : AdbcDatabase
+    {
+        readonly IReadOnlyDictionary<string, string> properties;
+
+        internal HiveServer2Database(IReadOnlyDictionary<string, string> 
properties)
+        {
+            this.properties = properties;
+        }
+
+        public override AdbcConnection Connect(IReadOnlyDictionary<string, 
string>? options)
+        {
+            // connection options takes precedence over database properties 
for the same option
+            IReadOnlyDictionary<string, string> mergedProperties = options == 
null
+                ? properties
+                : options
+                    .Concat(properties.Where(x => 
!options.Keys.Contains(x.Key, StringComparer.OrdinalIgnoreCase)))
+                    .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
+            HiveServer2Connection connection = 
HiveServer2ConnectionFactory.NewConnection(mergedProperties);
+            connection.OpenAsync().Wait();
+            return connection;
+        }
+    }
+}
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Driver.cs
similarity index 58%
copy from csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
copy to csharp/src/Drivers/Apache/Hive2/HiveServer2Driver.cs
index 9c03a3308..984fe6bd5 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Driver.cs
@@ -15,24 +15,15 @@
 * limitations under the License.
 */
 
-using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
+using System.Collections.Generic;
 
-namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
+namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 {
-    internal class ImpalaStatement : HiveServer2Statement
+    public class HiveServer2Driver : AdbcDriver
     {
-        internal ImpalaStatement(ImpalaConnection connection)
-            : base(connection)
+        public override AdbcDatabase Open(IReadOnlyDictionary<string, string> 
parameters)
         {
-            ValidateOptions(connection.Properties);
-        }
-
-        /// <summary>
-        /// Provides the constant string key values to the <see 
cref="AdbcStatement.SetOption(string, string)" /> method.
-        /// </summary>
-        public sealed class Options : ApacheParameters
-        {
-            // options specific to Impala go here
+            return new HiveServer2Database(parameters);
         }
     }
 }
diff --git a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2HttpConnection.cs
similarity index 51%
copy from csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
copy to csharp/src/Drivers/Apache/Hive2/HiveServer2HttpConnection.cs
index dc0f4b44a..441012b45 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2HttpConnection.cs
@@ -25,129 +25,146 @@ using System.Net.Security;
 using System.Text;
 using System.Threading;
 using System.Threading.Tasks;
-using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
 using Apache.Arrow.Ipc;
 using Apache.Hive.Service.Rpc.Thrift;
 using Thrift;
 using Thrift.Protocol;
 using Thrift.Transport;
 
-namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
+namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 {
-    internal class SparkHttpConnection : SparkConnection
+    internal class HiveServer2HttpConnection : HiveServer2Connection
     {
-        private static readonly string s_userAgent = $"{DriverName.Replace(" 
", "")}/{ProductVersionDefault}";
+        private const string ProductVersionDefault = "1.0.0";
+        private const string DriverName = "ADBC Hive Driver";
+        private const string ArrowVersion = "1.0.0";
         private const string BasicAuthenticationScheme = "Basic";
-        private const string BearerAuthenticationScheme = "Bearer";
+        private readonly Lazy<string> _productVersion;
+        private static readonly string s_userAgent = $"{DriverName.Replace(" 
", "")}/{ProductVersionDefault}";
+
+        protected override string GetProductVersionDefault() => 
ProductVersionDefault;
 
-        public SparkHttpConnection(IReadOnlyDictionary<string, string> 
properties) : base(properties)
+        protected override string ProductVersion => _productVersion.Value;
+
+        public HiveServer2HttpConnection(IReadOnlyDictionary<string, string> 
properties) : base(properties)
         {
+            ValidateProperties();
+            _productVersion = new Lazy<string>(() => GetProductVersion(), 
LazyThreadSafetyMode.PublicationOnly);
         }
 
-        protected override void ValidateAuthentication()
+        private void ValidateProperties()
+        {
+            ValidateAuthentication();
+            ValidateConnection();
+            ValidateOptions();
+        }
+
+        private void ValidateAuthentication()
         {
             // Validate authentication parameters
-            Properties.TryGetValue(SparkParameters.Token, out string? token);
             Properties.TryGetValue(AdbcOptions.Username, out string? username);
             Properties.TryGetValue(AdbcOptions.Password, out string? password);
-            Properties.TryGetValue(SparkParameters.AuthType, out string? 
authType);
-            bool isValidAuthType = SparkAuthTypeParser.TryParse(authType, out 
SparkAuthType authTypeValue);
+            Properties.TryGetValue(HiveServer2Parameters.AuthType, out string? 
authType);
+            bool isValidAuthType = 
HiveServer2AuthTypeParser.TryParse(authType, out HiveServer2AuthType 
authTypeValue);
             switch (authTypeValue)
             {
-                case SparkAuthType.Token:
-                    if (string.IsNullOrWhiteSpace(token))
-                        throw new ArgumentException(
-                            $"Parameter '{SparkParameters.AuthType}' is set to 
'{SparkAuthTypeConstants.Token}' but parameter '{SparkParameters.Token}' is not 
set. Please provide a value for '{SparkParameters.Token}'.",
-                            nameof(Properties));
-                    break;
-                case SparkAuthType.Basic:
+                case HiveServer2AuthType.Basic:
                     if (string.IsNullOrWhiteSpace(username) || 
string.IsNullOrWhiteSpace(password))
                         throw new ArgumentException(
-                            $"Parameter '{SparkParameters.AuthType}' is set to 
'{SparkAuthTypeConstants.Basic}' but parameters '{AdbcOptions.Username}' or 
'{AdbcOptions.Password}' are not set. Please provide a values for these 
parameters.",
+                            $"Parameter '{HiveServer2Parameters.AuthType}' is 
set to '{HiveServer2AuthTypeConstants.Basic}' but parameters 
'{AdbcOptions.Username}' or '{AdbcOptions.Password}' are not set. Please 
provide a values for these parameters.",
                             nameof(Properties));
                     break;
-                case SparkAuthType.UsernameOnly:
+                case HiveServer2AuthType.UsernameOnly:
                     if (string.IsNullOrWhiteSpace(username))
                         throw new ArgumentException(
-                            $"Parameter '{SparkParameters.AuthType}' is set to 
'{SparkAuthTypeConstants.UsernameOnly}' but parameter '{AdbcOptions.Username}' 
is not set. Please provide a values for this parameter.",
+                            $"Parameter '{HiveServer2Parameters.AuthType}' is 
set to '{HiveServer2AuthTypeConstants.UsernameOnly}' but parameter 
'{AdbcOptions.Username}' is not set. Please provide a values for this 
parameter.",
                             nameof(Properties));
                     break;
-                case SparkAuthType.None:
+                case HiveServer2AuthType.None:
                     break;
-                case SparkAuthType.Empty:
-                    if (string.IsNullOrWhiteSpace(token) && 
(string.IsNullOrWhiteSpace(username) || string.IsNullOrWhiteSpace(password)))
+                case HiveServer2AuthType.Empty:
+                    if (string.IsNullOrWhiteSpace(username) || 
string.IsNullOrWhiteSpace(password))
                         throw new ArgumentException(
-                            $"Parameters must include valid authentiation 
settings. Please provide either '{SparkParameters.Token}'; or 
'{AdbcOptions.Username}' and '{AdbcOptions.Password}'.",
+                            $"Parameters must include valid authentication 
settings. Please provide '{AdbcOptions.Username}' and 
'{AdbcOptions.Password}'.",
                             nameof(Properties));
                     break;
                 default:
-                    throw new 
ArgumentOutOfRangeException(SparkParameters.AuthType, authType, $"Unsupported 
{SparkParameters.AuthType} value.");
+                    throw new 
ArgumentOutOfRangeException(HiveServer2Parameters.AuthType, authType, 
$"Unsupported {HiveServer2Parameters.AuthType} value.");
             }
         }
 
-        protected override void ValidateConnection()
+        private void ValidateConnection()
         {
             // HostName or Uri is required parameter
             Properties.TryGetValue(AdbcOptions.Uri, out string? uri);
-            Properties.TryGetValue(SparkParameters.HostName, out string? 
hostName);
+            Properties.TryGetValue(HiveServer2Parameters.HostName, out string? 
hostName);
             if ((Uri.CheckHostName(hostName) == UriHostNameType.Unknown)
                 && (string.IsNullOrEmpty(uri) || !Uri.TryCreate(uri, 
UriKind.Absolute, out Uri? _)))
             {
                 throw new ArgumentException(
-                    $"Required parameter '{SparkParameters.HostName}' or 
'{AdbcOptions.Uri}' is missing or invalid. Please provide a valid hostname or 
URI for the data source.",
+                    $"Required parameter '{HiveServer2Parameters.HostName}' or 
'{AdbcOptions.Uri}' is missing or invalid. Please provide a valid hostname or 
URI for the data source.",
                     nameof(Properties));
             }
 
             // Validate port range
-            Properties.TryGetValue(SparkParameters.Port, out string? port);
+            Properties.TryGetValue(HiveServer2Parameters.Port, out string? 
port);
             if (int.TryParse(port, out int portNumber) && (portNumber <= 
IPEndPoint.MinPort || portNumber > IPEndPoint.MaxPort))
                 throw new ArgumentOutOfRangeException(
                     nameof(Properties),
                     port,
-                    $"Parameter '{SparkParameters.Port}' value is not in the 
valid range of 1 .. {IPEndPoint.MaxPort}.");
+                    $"Parameter '{HiveServer2Parameters.Port}' value is not in 
the valid range of 1 .. {IPEndPoint.MaxPort}.");
 
             // Ensure the parameters will produce a valid address
-            Properties.TryGetValue(SparkParameters.Path, out string? path);
+            Properties.TryGetValue(HiveServer2Parameters.Path, out string? 
path);
             _ = new HttpClient()
             {
-                BaseAddress = GetBaseAddress(uri, hostName, path, port)
+                BaseAddress = GetBaseAddress(uri, hostName, path, port, 
HiveServer2Parameters.HostName)
             };
         }
 
-        protected override void ValidateOptions()
+        private void ValidateOptions()
         {
-            Properties.TryGetValue(SparkParameters.DataTypeConv, out string? 
dataTypeConv);
+            Properties.TryGetValue(HiveServer2Parameters.DataTypeConv, out 
string? dataTypeConv);
             DataTypeConversion = DataTypeConversionParser.Parse(dataTypeConv);
-            Properties.TryGetValue(SparkParameters.TLSOptions, out string? 
tlsOptions);
+            Properties.TryGetValue(HiveServer2Parameters.TLSOptions, out 
string? tlsOptions);
             TlsOptions = TlsOptionsParser.Parse(tlsOptions);
-            Properties.TryGetValue(SparkParameters.ConnectTimeoutMilliseconds, 
out string? connectTimeoutMs);
+            
Properties.TryGetValue(HiveServer2Parameters.ConnectTimeoutMilliseconds, out 
string? connectTimeoutMs);
             if (connectTimeoutMs != null)
             {
                 ConnectTimeoutMilliseconds = int.TryParse(connectTimeoutMs, 
NumberStyles.Integer, CultureInfo.InvariantCulture, out int 
connectTimeoutMsValue) && (connectTimeoutMsValue >= 0)
                     ? connectTimeoutMsValue
-                    : throw new 
ArgumentOutOfRangeException(SparkParameters.ConnectTimeoutMilliseconds, 
connectTimeoutMs, $"must be a value of 0 (infinite) or between 1 .. 
{int.MaxValue}. default is 30000 milliseconds.");
+                    : throw new 
ArgumentOutOfRangeException(HiveServer2Parameters.ConnectTimeoutMilliseconds, 
connectTimeoutMs, $"must be a value of 0 (infinite) or between 1 .. 
{int.MaxValue}. default is 30000 milliseconds.");
             }
         }
 
-        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema) => new HiveServer2Reader(statement, schema, dataTypeConversion: 
statement.Connection.DataTypeConversion);
+        public override AdbcStatement CreateStatement()
+        {
+            return new HiveServer2Statement(this);
+        }
+
+        internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema) => new HiveServer2Reader(
+                statement,
+                schema,
+                dataTypeConversion: statement.Connection.DataTypeConversion,
+                enableBatchSizeStopCondition: false);
 
         protected override TTransport CreateTransport()
         {
             // Assumption: parameters have already been validated.
-            Properties.TryGetValue(SparkParameters.HostName, out string? 
hostName);
-            Properties.TryGetValue(SparkParameters.Path, out string? path);
-            Properties.TryGetValue(SparkParameters.Port, out string? port);
-            Properties.TryGetValue(SparkParameters.AuthType, out string? 
authType);
-            bool isValidAuthType = SparkAuthTypeParser.TryParse(authType, out 
SparkAuthType authTypeValue);
-            Properties.TryGetValue(SparkParameters.Token, out string? token);
+            Properties.TryGetValue(HiveServer2Parameters.HostName, out string? 
hostName);
+            Properties.TryGetValue(HiveServer2Parameters.Path, out string? 
path);
+            Properties.TryGetValue(HiveServer2Parameters.Port, out string? 
port);
+            Properties.TryGetValue(HiveServer2Parameters.AuthType, out string? 
authType);
+            bool isValidAuthType = 
HiveServer2AuthTypeParser.TryParse(authType, out HiveServer2AuthType 
authTypeValue);
             Properties.TryGetValue(AdbcOptions.Username, out string? username);
             Properties.TryGetValue(AdbcOptions.Password, out string? password);
             Properties.TryGetValue(AdbcOptions.Uri, out string? uri);
 
-            Uri baseAddress = GetBaseAddress(uri, hostName, path, port);
-            AuthenticationHeaderValue? authenticationHeaderValue = 
GetAuthenticationHeaderValue(authTypeValue, token, username, password);
+            Uri baseAddress = GetBaseAddress(uri, hostName, path, port, 
HiveServer2Parameters.HostName);
+            AuthenticationHeaderValue? authenticationHeaderValue = 
GetAuthenticationHeaderValue(authTypeValue, username, password);
 
             HttpClientHandler httpClientHandler = NewHttpClientHandler();
+            httpClientHandler.AutomaticDecompression = 
DecompressionMethods.GZip | DecompressionMethods.Deflate;
             HttpClient httpClient = new(httpClientHandler);
             httpClient.BaseAddress = baseAddress;
             httpClient.DefaultRequestHeaders.Authorization = 
authenticationHeaderValue;
@@ -174,38 +191,33 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
             {
                 httpClientHandler.ServerCertificateCustomValidationCallback = 
(request, certificate, chain, policyErrors) =>
                 {
-                    if (policyErrors == SslPolicyErrors.None) return true;
+                    if 
(policyErrors.HasFlag(SslPolicyErrors.RemoteCertificateChainErrors) && 
!TlsOptions.HasFlag(HiveServer2TlsOption.AllowSelfSigned)) return false;
+                    if 
(policyErrors.HasFlag(SslPolicyErrors.RemoteCertificateNameMismatch) && 
!TlsOptions.HasFlag(HiveServer2TlsOption.AllowHostnameMismatch)) return false;
 
-                    return
-                       
(!policyErrors.HasFlag(SslPolicyErrors.RemoteCertificateChainErrors) || 
TlsOptions.HasFlag(HiveServer2TlsOption.AllowSelfSigned))
-                    && 
(!policyErrors.HasFlag(SslPolicyErrors.RemoteCertificateNameMismatch) || 
TlsOptions.HasFlag(HiveServer2TlsOption.AllowHostnameMismatch));
+                    return true;
                 };
             }
 
             return httpClientHandler;
         }
 
-        private static AuthenticationHeaderValue? 
GetAuthenticationHeaderValue(SparkAuthType authType, string? token, string? 
username, string? password)
+        private static AuthenticationHeaderValue? 
GetAuthenticationHeaderValue(HiveServer2AuthType authType, string? username, 
string? password)
         {
-            if (!string.IsNullOrEmpty(token) && (authType == 
SparkAuthType.Empty || authType == SparkAuthType.Token))
-            {
-                return new 
AuthenticationHeaderValue(BearerAuthenticationScheme, token);
-            }
-            else if (!string.IsNullOrEmpty(username) && 
!string.IsNullOrEmpty(password) && (authType == SparkAuthType.Empty || authType 
== SparkAuthType.Basic))
+            if (!string.IsNullOrEmpty(username) && 
!string.IsNullOrEmpty(password) && (authType == HiveServer2AuthType.Empty || 
authType == HiveServer2AuthType.Basic))
             {
                 return new 
AuthenticationHeaderValue(BasicAuthenticationScheme, 
Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}")));
             }
-            else if (!string.IsNullOrEmpty(username) && (authType == 
SparkAuthType.Empty || authType == SparkAuthType.UsernameOnly))
+            else if (!string.IsNullOrEmpty(username) && (authType == 
HiveServer2AuthType.Empty || authType == HiveServer2AuthType.UsernameOnly))
             {
                 return new 
AuthenticationHeaderValue(BasicAuthenticationScheme, 
Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:")));
             }
-            else if (authType == SparkAuthType.None)
+            else if (authType == HiveServer2AuthType.None)
             {
                 return null;
             }
             else
             {
-                throw new AdbcException("Missing connection properties. Must 
contain 'token' or 'username' and 'password'");
+                throw new AdbcException("Missing connection properties. Must 
contain 'username' and 'password'");
             }
         }
 
@@ -224,6 +236,74 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
             return req;
         }
 
+        protected override void SetPrecisionScaleAndTypeName(
+            short colType,
+            string typeName,
+            TableInfo? tableInfo,
+            int columnSize,
+            int decimalDigits)
+        {
+            // Keep the original type name
+            tableInfo?.TypeName.Add(typeName);
+            switch (colType)
+            {
+                case (short)ColumnTypeId.DECIMAL:
+                case (short)ColumnTypeId.NUMERIC:
+                    {
+                        // Precision/scale is provided in the API call.
+                        SqlDecimalParserResult result = 
SqlTypeNameParser<SqlDecimalParserResult>.Parse(typeName, colType);
+                        tableInfo?.Precision.Add(columnSize);
+                        tableInfo?.Scale.Add((short)decimalDigits);
+                        tableInfo?.BaseTypeName.Add(result.BaseTypeName);
+                        break;
+                    }
+
+                case (short)ColumnTypeId.CHAR:
+                case (short)ColumnTypeId.NCHAR:
+                case (short)ColumnTypeId.VARCHAR:
+                case (short)ColumnTypeId.LONGVARCHAR:
+                case (short)ColumnTypeId.LONGNVARCHAR:
+                case (short)ColumnTypeId.NVARCHAR:
+                    {
+                        // Precision is provided in the API call.
+                        SqlCharVarcharParserResult result = 
SqlTypeNameParser<SqlCharVarcharParserResult>.Parse(typeName, colType);
+                        tableInfo?.Precision.Add(columnSize);
+                        tableInfo?.Scale.Add(null);
+                        tableInfo?.BaseTypeName.Add(result.BaseTypeName);
+                        break;
+                    }
+
+                default:
+                    {
+                        SqlTypeNameParserResult result = 
SqlTypeNameParser<SqlTypeNameParserResult>.Parse(typeName, colType);
+                        tableInfo?.Precision.Add(null);
+                        tableInfo?.Scale.Add(null);
+                        tableInfo?.BaseTypeName.Add(result.BaseTypeName);
+                        break;
+                    }
+            }
+        }
+
+        protected override ColumnsMetadataColumnNames 
GetColumnsMetadataColumnNames()
+        {
+            return new ColumnsMetadataColumnNames()
+            {
+                TableCatalog = TableCat,
+                TableSchema = TableSchem,
+                TableName = TableName,
+                ColumnName = ColumnName,
+                DataType = DataType,
+                TypeName = TypeName,
+                Nullable = Nullable,
+                ColumnDef = ColumnDef,
+                OrdinalPosition = OrdinalPosition,
+                IsNullable = IsNullable,
+                IsAutoIncrement = IsAutoIncrement,
+                ColumnSize = ColumnSize,
+                DecimalDigits = DecimalDigits,
+            };
+        }
+
         protected override Task<TGetResultSetMetadataResp> 
GetResultSetMetadataAsync(TGetSchemasResp response, CancellationToken 
cancellationToken = default) =>
             GetResultSetMetadataAsync(response.OperationHandle, Client, 
cancellationToken);
         protected override Task<TGetResultSetMetadataResp> 
GetResultSetMetadataAsync(TGetCatalogsResp response, CancellationToken 
cancellationToken = default) =>
@@ -243,8 +323,20 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
         protected override Task<TRowSet> GetRowSetAsync(TGetSchemasResp 
response, CancellationToken cancellationToken = default) =>
             FetchResultsAsync(response.OperationHandle, cancellationToken: 
cancellationToken);
 
+        protected internal override int PositionRequiredOffset => 0;
+
+        protected override string InfoDriverName => DriverName;
+
+        protected override string InfoDriverArrowVersion => ArrowVersion;
+
+        protected override bool IsColumnSizeValidForDecimal => false;
+
+        protected override bool GetObjectsPatternsRequireLowerCase => false;
+
         internal override SchemaParser SchemaParser => new 
HiveServer2SchemaParser();
 
-        internal override SparkServerType ServerType => SparkServerType.Http;
+        internal HiveServer2TransportType Type => 
HiveServer2TransportType.Http;
+
+        protected override int ColumnMapIndexOffset => 1;
     }
 }
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Parameters.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Parameters.cs
index 4f2bc62d2..b9d7f2c2a 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2Parameters.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Parameters.cs
@@ -17,6 +17,30 @@
 
 namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 {
+    public static class HiveServer2Parameters
+    {
+        public const string HostName = "adbc.hive.host";
+        public const string Port = "adbc.hive.port";
+        public const string Path = "adbc.hive.path";
+        public const string AuthType = "adbc.hive.auth_type";
+        public const string TransportType = "adbc.hive.transport_type";
+        public const string DataTypeConv = "adbc.hive.data_type_conv";
+        public const string TLSOptions = "adbc.hive.tls_options";
+        public const string ConnectTimeoutMilliseconds = 
"adbc.hive.connect_timeout_ms";
+    }
+
+    public static class HiveServer2AuthTypeConstants
+    {
+        public const string None = "none";
+        public const string UsernameOnly = "username_only";
+        public const string Basic = "basic";
+    }
+
+    public static class HiveServer2TransportTypeConstants
+    {
+        public const string Http = "http";
+    }
+
     public static class DataTypeConversionOptions
     {
         public const string None = "none";
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs
index 34dbf10f2..c571947b2 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs
@@ -57,6 +57,8 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
         private const int MillisecondDecimalPlaces = 3;
         private HiveServer2Statement? _statement;
         private readonly DataTypeConversion _dataTypeConversion;
+        // Flag to enable/disable stopping reading based on batch size 
condition
+        private readonly bool _enableBatchSizeStopCondition;
         private static readonly IReadOnlyDictionary<ArrowTypeId, 
Func<StringArray, IArrowType, IArrowArray>> s_arrowStringConverters =
             new Dictionary<ArrowTypeId, Func<StringArray, IArrowType, 
IArrowArray>>()
             {
@@ -73,11 +75,13 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
         public HiveServer2Reader(
             HiveServer2Statement statement,
             Schema schema,
-            DataTypeConversion dataTypeConversion)
+            DataTypeConversion dataTypeConversion,
+            bool enableBatchSizeStopCondition = true)
         {
             _statement = statement;
             Schema = schema;
             _dataTypeConversion = dataTypeConversion;
+            _enableBatchSizeStopCondition = enableBatchSizeStopCondition;
         }
 
         public Schema Schema { get; }
@@ -97,7 +101,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 
                 int columnCount = GetColumnCount(response);
                 int rowCount = GetRowCount(response, columnCount);
-                if ((_statement.BatchSize > 0 && rowCount < 
_statement.BatchSize) || rowCount == 0)
+                if ((_enableBatchSizeStopCondition && _statement.BatchSize > 0 
&& rowCount < _statement.BatchSize) || rowCount == 0)
                 {
                     // This is the last batch
                     _statement = null;
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2Statement.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2Statement.cs
index 6b4605e8e..c08f997ca 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2Statement.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2Statement.cs
@@ -25,11 +25,12 @@ using Thrift.Transport;
 
 namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 {
-    internal abstract class HiveServer2Statement : AdbcStatement
+    internal class HiveServer2Statement : AdbcStatement
     {
-        protected HiveServer2Statement(HiveServer2Connection connection)
+        internal HiveServer2Statement(HiveServer2Connection connection)
         {
             Connection = connection;
+            ValidateOptions(connection.Properties);
         }
 
         protected virtual void SetStatementProperties(TExecuteStatementReq 
statement)
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2TransportType.cs 
b/csharp/src/Drivers/Apache/Hive2/HiveServer2TransportType.cs
new file mode 100644
index 000000000..6d95fefe3
--- /dev/null
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2TransportType.cs
@@ -0,0 +1,48 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
+{
+    internal enum HiveServer2TransportType
+    {
+        Invalid = 0,
+        Http,
+        Empty = int.MaxValue,
+    }
+
+    internal static class HiveServer2TransportTypeParser
+    {
+        internal const string SupportedList = 
HiveServer2TransportTypeConstants.Http;
+
+        internal static bool TryParse(string? serverType, out 
HiveServer2TransportType serverTypeValue)
+        {
+            switch (serverType?.Trim().ToLowerInvariant())
+            {
+                case null:
+                case "":
+                    serverTypeValue = HiveServer2TransportType.Empty;
+                    return true;
+                case HiveServer2TransportTypeConstants.Http:
+                    serverTypeValue = HiveServer2TransportType.Http;
+                    return true;
+                default:
+                    serverTypeValue = HiveServer2TransportType.Invalid;
+                    return false;
+            }
+        }
+    }
+}
diff --git a/csharp/src/Drivers/Apache/Hive2/README.md 
b/csharp/src/Drivers/Apache/Hive2/README.md
new file mode 100644
index 000000000..e9c863190
--- /dev/null
+++ b/csharp/src/Drivers/Apache/Hive2/README.md
@@ -0,0 +1,98 @@
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+-->
+
+# Hive Driver
+
+## Database and Connection Properties
+
+Properties should be passed in the call to `HiveServer2Driver.Open`,
+but can also be passed in the call to `AdbcDatabase.Connect`.
+
+| Property               | Description | Default |
+| :---                   | :---        | :---    |
+| `adbc.hive.transport_type`      | (Required) Indicates the Hive transport 
type. `http` | |
+| `adbc.hive.auth_type` | An indicator of the intended type of authentication. 
Allowed values: `none`, `username_only` and `basic`. This property is optional. 
The authentication type can be inferred from `username` and `password`. | |
+| `adbc.hive.host`      | Host name for the data source. Do not include scheme 
or port number. Example: `hiveserver.region.cloudapp.azure.com` |  |
+| `adbc.hive.port`      | The port number the data source listens on for new connections. | `443` |
+| `adbc.hive.path`      | The URI path on the data source server. Example: 
`/hive2` | |
+| `uri`                  | The full URI that includes scheme, host, port and 
path. Only one of options `uri` or `adbc.hive.host` can be provided. | |
+| `username`             | The user name used for basic authentication | |
+| `password`             | The password for the user name used for basic 
authentication. | |
+| `adbc.hive.data_type_conv` | Comma-separated list of data conversion 
options. Each option indicates the type of conversion to perform on data 
returned from the Hive server. <br><br>Allowed values: `none`, `scalar`. 
<br><br>Option `none` indicates there is no conversion from Hive type to native 
type (i.e., no conversion from String to Timestamp for Apache Hive over HTTP). 
Example `adbc.hive.data_type_conv=none`. <br><br>Option `scalar` will perform 
conversion (if necessary) from the Hiv [...]
+| `adbc.hive.tls_options` | Comma-separated list of TLS/SSL options. Each 
option indicates the TLS/SSL option when connecting to a Hive server. 
<br><br>Allowed values: `allow_self_signed`, `allow_hostname_mismatch`. 
<br><br>Option `allow_self_signed` allows certificate errors due to an unknown 
certificate authority, typically when using a self-signed certificate. Option 
`allow_hostname_mismatch` allows certificate errors due to a mismatch of the 
hostname. (e.g., when connecting through an [...]
+| `adbc.hive.connect_timeout_ms` | Sets the timeout (in milliseconds) to open 
a new session. Values can be 0 (infinite) or greater than zero. | `30000` |
+| `adbc.apache.statement.batch_size` | Sets the maximum number of rows to 
retrieve in a single batch request. | `50000` |
+| `adbc.apache.statement.polltime_ms` | If polling is necessary to get a 
result, this option sets the length of time (in milliseconds) to wait between 
polls. | `500` |
+| `adbc.apache.statement.query_timeout_s` | Sets the maximum time (in seconds) 
for a query to complete. Values can be 0 (infinite) or greater than zero. | 
`60` |
+
+## Timeout Configuration
+
+Timeouts have a hierarchy to their behavior. As specified above, the 
`adbc.hive.connect_timeout_ms` is analogous to a ConnectTimeout and used to 
initially establish a new session with the server.
+
+The `adbc.apache.statement.query_timeout_s` is analogous to a CommandTimeout 
for any subsequent calls to the server for requests, including metadata calls 
and executing queries.
+
+The `adbc.apache.statement.polltime_ms` specifies the time between polls to 
the service, up to the limit specified by 
`adbc.apache.statement.query_timeout_s`.
+
+## Hive Data Types
+
+The following table depicts how the Hive ADBC driver converts a Hive type to 
an Arrow type and a .NET type:
+
+### Apache Hive over HTTP (adbc.hive.data_type_conv = ?)
+
+| Hive Type           | Arrow Type (`none`) | C# Type (`none`) | Arrow Type 
(`scalar`) | C# Type (`scalar`) |
+| :---                 | :---:      | :---:   | :---:                 | :---:  
            |
+| ARRAY*               | String     | string  | | |
+| BIGINT               | Int64      | long | | |
+| BINARY               | Binary     | byte[] | | |
+| BOOLEAN              | Boolean    | bool | | |
+| CHAR                 | String     | string | | |
+| DATE*                | *String*   | *string* | Date32 | DateTime |
+| DECIMAL*             | *String*   | *string* | Decimal128 | SqlDecimal |
+| DOUBLE               | Double     | double | | |
+| FLOAT                | *Double*   | *double* | Float | float |
+| INT                  | Int32      | int | | |
+| INTERVAL_DAY_TIME+   | String     | string | | |
+| INTERVAL_YEAR_MONTH+ | String     | string | | |
+| MAP*                 | String     | string | | |
+| NULL                 | String     | string | | |
+| SMALLINT             | Int16      | short | | |
+| STRING               | String     | string | | |
+| STRUCT*              | String     | string | | |
+| TIMESTAMP*           | *String*   | *string* | Timestamp | DateTimeOffset |
+| TINYINT              | Int8       | sbyte | | |
+| VARCHAR              | String     | string | | |
+
+\* Types are returned as strings instead of "native" types<br>
+\+ Interval types are returned as strings
+
+## Supported Variants
+
+### Apache Hive over HTTP
+
+Support for Hive over HTTP is the most mature.
+
+### Azure Hive HDInsight
+
+To read data from Azure HDInsight Hive Cluster, use the following parameters:
+adbc.hive.transport_type = "http"
+adbc.hive.port = "443"
+adbc.hive.path = "/hive2"
+adbc.hive.host = $"{clusterHostName}"
+username = $"{clusterUserName}"
+password = $"{clusterPassword}"
diff --git a/csharp/src/Drivers/Apache/Hive2/SqlTypeNameParser.cs 
b/csharp/src/Drivers/Apache/Hive2/SqlTypeNameParser.cs
index 95fa358b1..bd7d285e2 100644
--- a/csharp/src/Drivers/Apache/Hive2/SqlTypeNameParser.cs
+++ b/csharp/src/Drivers/Apache/Hive2/SqlTypeNameParser.cs
@@ -335,8 +335,11 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 
         public override string BaseTypeName => "CHAR";
 
+        private const int CharColumnSizeDefault = 255;
+
+        // Allow precision definition to be optional
         private static readonly Regex s_expression = new(
-            
@"^\s*(?<typeName>((CHAR)|(NCHAR)))(\s*\(\s*(?<precision>\d{1,10})\s*\))\s*$",
+            
@"^\s*(?<typeName>((CHAR)|(NCHAR)))(\s*\(\s*(?<precision>\d{1,10})\s*\))?\s*$",
             RegexOptions.IgnoreCase | RegexOptions.Compiled | 
RegexOptions.CultureInvariant);
 
         protected override Regex Expression => s_expression;
@@ -348,7 +351,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
 
             int precision = int.TryParse(precisionGroup.Value, out int 
candidatePrecision)
                 ? candidatePrecision
-                : throw new ArgumentException($"Unable to parse length: 
'{precisionGroup.Value}'", nameof(input));
+                : CharColumnSizeDefault;
             return new SqlCharVarcharParserResult(input, BaseTypeName, 
precision);
         }
     }
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaConnectionFactory.cs 
b/csharp/src/Drivers/Apache/Impala/ImpalaConnectionFactory.cs
index b2be6698c..5a0323123 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaConnectionFactory.cs
+++ b/csharp/src/Drivers/Apache/Impala/ImpalaConnectionFactory.cs
@@ -24,13 +24,18 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
     {
         public static ImpalaConnection 
NewConnection(IReadOnlyDictionary<string, string> properties)
         {
-            bool _ = properties.TryGetValue(ImpalaParameters.Type, out string? 
type) && string.IsNullOrEmpty(type);
-            bool __ = ServerTypeParser.TryParse(type, out ImpalaServerType 
serverTypeValue);
+            if (!properties.TryGetValue(ImpalaParameters.Type, out string? 
type) && string.IsNullOrEmpty(type))
+            {
+                throw new ArgumentException($"Required property 
'{ImpalaParameters.Type}' is missing. Supported types: 
{ServerTypeParser.SupportedList}", nameof(properties));
+            }
+            if (!ServerTypeParser.TryParse(type, out ImpalaServerType 
serverTypeValue))
+            {
+                throw new ArgumentOutOfRangeException(nameof(properties), 
$"Unsupported or unknown value '{type}' given for property 
'{ImpalaParameters.Type}'. Supported types: {ServerTypeParser.SupportedList}");
+            }
             return serverTypeValue switch
             {
                 ImpalaServerType.Http => new ImpalaHttpConnection(properties),
                 ImpalaServerType.Standard => new 
ImpalaStandardConnection(properties),
-                ImpalaServerType.Empty => throw new 
ArgumentException($"Required property '{ImpalaParameters.Type}' is missing. 
Supported types: {ServerTypeParser.SupportedList}", nameof(properties)),
                 _ => throw new ArgumentOutOfRangeException(nameof(properties), 
$"Unsupported or unknown value '{type}' given for property 
'{ImpalaParameters.Type}'. Supported types: {ServerTypeParser.SupportedList}"),
             };
         }
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs 
b/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs
index 12b2b89b9..67a7aa3d7 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs
@@ -102,7 +102,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
             Properties.TryGetValue(ImpalaParameters.Path, out string? path);
             _ = new HttpClient()
             {
-                BaseAddress = GetBaseAddress(uri, hostName, path, port)
+                BaseAddress = GetBaseAddress(uri, hostName, path, port, 
ImpalaParameters.HostName)
             };
         }
 
@@ -135,7 +135,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
             Properties.TryGetValue(AdbcOptions.Password, out string? password);
             Properties.TryGetValue(AdbcOptions.Uri, out string? uri);
 
-            Uri baseAddress = GetBaseAddress(uri, hostName, path, port);
+            Uri baseAddress = GetBaseAddress(uri, hostName, path, port, 
ImpalaParameters.HostName);
             AuthenticationHeaderValue? authenticationHeaderValue = 
GetAuthenticationHeaderValue(authTypeValue, username, password);
 
             HttpClientHandler httpClientHandler = NewHttpClientHandler();
@@ -213,5 +213,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
         internal override SchemaParser SchemaParser => new 
HiveServer2SchemaParser();
 
         internal override ImpalaServerType ServerType => ImpalaServerType.Http;
+
+        protected override int ColumnMapIndexOffset => 0;
     }
 }
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaStandardConnection.cs 
b/csharp/src/Drivers/Apache/Impala/ImpalaStandardConnection.cs
index 29f9195d2..01045618d 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaStandardConnection.cs
+++ b/csharp/src/Drivers/Apache/Impala/ImpalaStandardConnection.cs
@@ -146,5 +146,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
         internal override IArrowArrayStream NewReader<T>(T statement, Schema 
schema) => new HiveServer2Reader(statement, schema, dataTypeConversion: 
statement.Connection.DataTypeConversion);
 
         internal override ImpalaServerType ServerType => 
ImpalaServerType.Standard;
+
+        protected override int ColumnMapIndexOffset => 0;
     }
 }
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs 
b/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
index 9c03a3308..840fbe297 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
+++ b/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
@@ -24,7 +24,6 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
         internal ImpalaStatement(ImpalaConnection connection)
             : base(connection)
         {
-            ValidateOptions(connection.Properties);
         }
 
         /// <summary>
diff --git a/csharp/src/Drivers/Apache/Spark/README.md 
b/csharp/src/Drivers/Apache/Spark/README.md
index 3b5a0e79e..1510c0291 100644
--- a/csharp/src/Drivers/Apache/Spark/README.md
+++ b/csharp/src/Drivers/Apache/Spark/README.md
@@ -32,7 +32,7 @@ but can also be passed in the call to `AdbcDatabase.Connect`.
 | `adbc.spark.port`      | The port number the data source listens on for a 
new connections. | `443` |
 | `adbc.spark.path`      | The URI path on the data source server. Example: 
`sql/protocolv1/o/0123456789123456/01234-0123456-source` | |
 | `adbc.spark.token`     | For token-based authentication, the token to be 
authenticated on the data source. Example: `abcdef0123456789` | |
-| `uri`                  | The full URI that includes scheme, host, port and 
path. If set, this property takes precedence over `adbc.spark.host`, 
`adbc.spark.port` and `adbc.spark.path`. | |
+| `uri`                  | The full URI that includes scheme, host, port and 
path. Only one of options `uri` or `adbc.spark.host` can be provided. | |
 | `username`             | The user name used for basic authentication | |
 | `password`             | The password for the user name used for basic 
authentication. | |
 | `adbc.spark.data_type_conv` | Comma-separated list of data conversion 
options. Each option indicates the type of conversion to perform on data 
returned from the Spark server. <br><br>Allowed values: `none`, `scalar`. 
<br><br>Option `none` indicates there is no conversion from Spark type to 
native type (i.e., no conversion from String to Timestamp for Apache Spark over 
HTTP). Example `adbc.spark.conv_data_type=none`. <br><br>Option `scalar` will 
perform conversion (if necessary) from th [...]
diff --git a/csharp/src/Drivers/Apache/Spark/SparkConnectionFactory.cs 
b/csharp/src/Drivers/Apache/Spark/SparkConnectionFactory.cs
index 7e432289e..7440f95f7 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkConnectionFactory.cs
+++ b/csharp/src/Drivers/Apache/Spark/SparkConnectionFactory.cs
@@ -24,16 +24,23 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
     {
         public static SparkConnection 
NewConnection(IReadOnlyDictionary<string, string> properties)
         {
-            bool _ = properties.TryGetValue(SparkParameters.Type, out string? 
type) && string.IsNullOrEmpty(type);
-            bool __ = ServerTypeParser.TryParse(type, out SparkServerType 
serverTypeValue);
+            if (!properties.TryGetValue(SparkParameters.Type, out string? 
type) && string.IsNullOrEmpty(type))
+            {
+                throw new ArgumentException($"Required property 
'{SparkParameters.Type}' is missing. Supported types: 
{ServerTypeParser.SupportedList}", nameof(properties));
+            }
+            if (!ServerTypeParser.TryParse(type, out SparkServerType 
serverTypeValue))
+            {
+                throw new ArgumentOutOfRangeException(nameof(properties), 
$"Unsupported or unknown value '{type}' given for property 
'{SparkParameters.Type}'. Supported types: {ServerTypeParser.SupportedList}");
+            }
+
             return serverTypeValue switch
             {
                 SparkServerType.Databricks => new 
SparkDatabricksConnection(properties),
                 SparkServerType.Http => new SparkHttpConnection(properties),
                 // TODO: Re-enable when properly supported
                 //SparkServerType.Standard => new 
SparkStandardConnection(properties),
-                SparkServerType.Empty => throw new 
ArgumentException($"Required property '{SparkParameters.Type}' is missing. 
Supported types: {ServerTypeParser.SupportedList}", nameof(properties)),
                 _ => throw new ArgumentOutOfRangeException(nameof(properties), 
$"Unsupported or unknown value '{type}' given for property 
'{SparkParameters.Type}'. Supported types: {ServerTypeParser.SupportedList}"),
+
             };
         }
 
diff --git a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs 
b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
index dc0f4b44a..33701a8f9 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
@@ -110,7 +110,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
             Properties.TryGetValue(SparkParameters.Path, out string? path);
             _ = new HttpClient()
             {
-                BaseAddress = GetBaseAddress(uri, hostName, path, port)
+                BaseAddress = GetBaseAddress(uri, hostName, path, port, 
SparkParameters.HostName)
             };
         }
 
@@ -144,7 +144,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
             Properties.TryGetValue(AdbcOptions.Password, out string? password);
             Properties.TryGetValue(AdbcOptions.Uri, out string? uri);
 
-            Uri baseAddress = GetBaseAddress(uri, hostName, path, port);
+            Uri baseAddress = GetBaseAddress(uri, hostName, path, port, 
SparkParameters.HostName);
             AuthenticationHeaderValue? authenticationHeaderValue = 
GetAuthenticationHeaderValue(authTypeValue, token, username, password);
 
             HttpClientHandler httpClientHandler = NewHttpClientHandler();
@@ -246,5 +246,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
         internal override SchemaParser SchemaParser => new 
HiveServer2SchemaParser();
 
         internal override SparkServerType ServerType => SparkServerType.Http;
+
+        protected override int ColumnMapIndexOffset => 1;
     }
 }
diff --git a/csharp/src/Drivers/Apache/Spark/SparkStatement.cs 
b/csharp/src/Drivers/Apache/Spark/SparkStatement.cs
index 945558bd8..5decbddb0 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkStatement.cs
+++ b/csharp/src/Drivers/Apache/Spark/SparkStatement.cs
@@ -26,7 +26,6 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
         internal SparkStatement(SparkConnection connection)
             : base(connection)
         {
-            ValidateOptions(connection.Properties);
         }
 
         protected override void SetStatementProperties(TExecuteStatementReq 
statement)
diff --git a/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs 
b/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs
index 8a6f2959d..b7f9b76bb 100644
--- a/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs
+++ b/csharp/test/Apache.Arrow.Adbc.Tests/TestBase.cs
@@ -85,7 +85,7 @@ namespace Apache.Arrow.Adbc.Tests
         {
             string tableName = NewTableName();
             string sqlUpdate = 
TestEnvironment.GetCreateTemporaryTableStatement(tableName, columns);
-            return await TemporaryTable.NewTemporaryTableAsync(statement, 
tableName, sqlUpdate);
+            return await TemporaryTable.NewTemporaryTableAsync(statement, 
tableName, sqlUpdate, OutputHelper);
         }
 
         /// <summary>
@@ -684,9 +684,10 @@ namespace Apache.Arrow.Adbc.Tests
             /// <param name="tableName">The name of temporary table to 
create.</param>
             /// <param name="sqlUpdate">The SQL query to create the table in 
the native SQL dialect.</param>
             /// <returns></returns>
-            public static async Task<TemporaryTable> 
NewTemporaryTableAsync(AdbcStatement statement, string tableName, string 
sqlUpdate)
+            public static async Task<TemporaryTable> 
NewTemporaryTableAsync(AdbcStatement statement, string tableName, string 
sqlUpdate, ITestOutputHelper? outputHelper = default)
             {
                 statement.SqlQuery = sqlUpdate;
+                outputHelper?.WriteLine(sqlUpdate);
                 await statement.ExecuteUpdateAsync();
                 return new TemporaryTable(statement, tableName);
             }
diff --git 
a/csharp/test/Drivers/Apache/Apache.Arrow.Adbc.Tests.Drivers.Apache.csproj 
b/csharp/test/Drivers/Apache/Apache.Arrow.Adbc.Tests.Drivers.Apache.csproj
index 8e9fdb4df..63365312e 100644
--- a/csharp/test/Drivers/Apache/Apache.Arrow.Adbc.Tests.Drivers.Apache.csproj
+++ b/csharp/test/Drivers/Apache/Apache.Arrow.Adbc.Tests.Drivers.Apache.csproj
@@ -28,6 +28,12 @@
   </ItemGroup>
 
   <ItemGroup>
+    <None Update="Hive2\Resources\hiveconfig-http.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+    <None Update="Hive2\Resources\HiveData.sql">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
     <None Update="Impala\Resources\ImpalaData.sql">
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
     </None>
diff --git a/csharp/test/Drivers/Apache/Common/BinaryBooleanValueTests.cs 
b/csharp/test/Drivers/Apache/Common/BinaryBooleanValueTests.cs
index efbd31641..7ab44685a 100644
--- a/csharp/test/Drivers/Apache/Common/BinaryBooleanValueTests.cs
+++ b/csharp/test/Drivers/Apache/Common/BinaryBooleanValueTests.cs
@@ -20,7 +20,6 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.Text;
 using System.Threading.Tasks;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Xunit;
 using Xunit.Abstractions;
 
@@ -34,7 +33,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
     /// </summary>
     public abstract class BinaryBooleanValueTests<TConfig, TEnv> : 
TestBase<TConfig, TEnv>
         where TConfig : TestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         public BinaryBooleanValueTests(ITestOutputHelper output, 
TestEnvironment<TConfig>.Factory<TEnv> testEnvFactory)
             : base(output, testEnvFactory) { }
@@ -106,7 +105,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
         /// </summary>
         public virtual async Task TestNullData(string projectionClause)
         {
-            string selectStatement = $"SELECT {projectionClause};";
+            string selectStatement = $"SELECT {projectionClause}";
             // Note: by default, this returns as String type, not NULL type.
             await SelectAndValidateValuesAsync(selectStatement, (object?)null, 
1);
         }
diff --git a/csharp/test/Drivers/Apache/Common/ClientTests.cs 
b/csharp/test/Drivers/Apache/Common/ClientTests.cs
index 9148d7281..f1ce3d03e 100644
--- a/csharp/test/Drivers/Apache/Common/ClientTests.cs
+++ b/csharp/test/Drivers/Apache/Common/ClientTests.cs
@@ -18,7 +18,6 @@
 using System;
 using System.Collections.Generic;
 using Apache.Arrow.Adbc.Client;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Apache.Arrow.Adbc.Tests.Xunit;
 using Xunit;
 using Xunit.Abstractions;
@@ -39,7 +38,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
     [Collection("TableCreateTestCollection")]
     public abstract class ClientTests<TConfig, TEnv> : TestBase<TConfig, TEnv>
         where TConfig : TestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         public ClientTests(ITestOutputHelper? outputHelper, 
TestEnvironment<TConfig>.Factory<TEnv> testEnvFactory)
             : base(outputHelper, testEnvFactory)
diff --git a/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs 
b/csharp/test/Drivers/Apache/Common/CommonTestEnvironment.cs
similarity index 87%
copy from csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
copy to csharp/test/Drivers/Apache/Common/CommonTestEnvironment.cs
index f141b293c..d0ac833df 100644
--- a/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Common/CommonTestEnvironment.cs
@@ -18,12 +18,12 @@
 using System;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
 
-namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
 {
-    public abstract class HiveServer2TestEnvironment<TConfig> : 
TestEnvironment<TConfig>
+    public abstract class CommonTestEnvironment<TConfig> : 
TestEnvironment<TConfig>
         where TConfig : TestConfiguration
     {
-        public HiveServer2TestEnvironment(Func<AdbcConnection> getConnection)
+        public CommonTestEnvironment(Func<AdbcConnection> getConnection)
             : base(getConnection)
         {
         }
diff --git a/csharp/test/Drivers/Apache/Common/ComplexTypesValueTests.cs 
b/csharp/test/Drivers/Apache/Common/ComplexTypesValueTests.cs
index c7fabcf24..fe7ba4a8b 100644
--- a/csharp/test/Drivers/Apache/Common/ComplexTypesValueTests.cs
+++ b/csharp/test/Drivers/Apache/Common/ComplexTypesValueTests.cs
@@ -16,7 +16,6 @@
 */
 
 using System.Threading.Tasks;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Xunit;
 using Xunit.Abstractions;
 
@@ -30,7 +29,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
     /// </summary>
     public abstract class ComplexTypesValueTests<TConfig, TEnv> : 
TestBase<TConfig, TEnv>
         where TConfig : TestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         public ComplexTypesValueTests(ITestOutputHelper output, 
TestEnvironment<TConfig>.Factory<TEnv> testEnvFactory)
             : base(output, testEnvFactory) { }
diff --git a/csharp/test/Drivers/Apache/Common/DateTimeValueTests.cs 
b/csharp/test/Drivers/Apache/Common/DateTimeValueTests.cs
index 8a8117160..02fee4c5e 100644
--- a/csharp/test/Drivers/Apache/Common/DateTimeValueTests.cs
+++ b/csharp/test/Drivers/Apache/Common/DateTimeValueTests.cs
@@ -19,7 +19,6 @@ using System;
 using System.Collections.Generic;
 using System.Globalization;
 using System.Threading.Tasks;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Xunit;
 using Xunit.Abstractions;
 
@@ -33,7 +32,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
     /// </summary>
     public abstract class DateTimeValueTests<TConfig, TEnv> : 
TestBase<TConfig, TEnv>
         where TConfig : TestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         // Spark handles microseconds but not nanoseconds. Truncated to 6 
decimal places.
         const string DateTimeZoneFormat = "yyyy-MM-dd'T'HH:mm:ss'.'ffffffK";
diff --git a/csharp/test/Drivers/Apache/Common/DriverTests.cs 
b/csharp/test/Drivers/Apache/Common/DriverTests.cs
index d133f83e7..6616f9e80 100644
--- a/csharp/test/Drivers/Apache/Common/DriverTests.cs
+++ b/csharp/test/Drivers/Apache/Common/DriverTests.cs
@@ -21,7 +21,6 @@ using System.Linq;
 using System.Text.RegularExpressions;
 using System.Threading.Tasks;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Apache.Arrow.Adbc.Tests.Metadata;
 using Apache.Arrow.Adbc.Tests.Xunit;
 using Apache.Arrow.Ipc;
@@ -45,7 +44,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
     [Collection("TableCreateTestCollection")]
     public abstract class DriverTests<TConfig, TEnv> : TestBase<TConfig, TEnv>
         where TConfig : ApacheTestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         /// <summary>
         /// Supported data types as a subset of <see cref="ColumnTypeId"/>
@@ -445,7 +444,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
             string schemaName = schema.SchemaName;
             string catalogFormatted = string.IsNullOrEmpty(catalogName) ? 
string.Empty : DelimitIdentifier(catalogName) + ".";
             string fullTableName = 
$"{catalogFormatted}{DelimitIdentifier(schemaName)}.{DelimitIdentifier(tableName)}";
-            using TemporaryTable temporaryTable = 
TemporaryTable.NewTemporaryTableAsync(Statement, fullTableName, $"CREATE TABLE 
IF NOT EXISTS {fullTableName} (INDEX INT)").Result;
+            using TemporaryTable temporaryTable = 
TemporaryTable.NewTemporaryTableAsync(Statement, fullTableName, $"CREATE TABLE 
IF NOT EXISTS {fullTableName} (INDEX INT)", OutputHelper).Result;
 
             using IArrowArrayStream stream = Connection.GetObjects(
                     depth: AdbcConnection.GetObjectsDepth.Tables,
diff --git a/csharp/test/Drivers/Apache/Common/NumericValueTests.cs 
b/csharp/test/Drivers/Apache/Common/NumericValueTests.cs
index 7c89c28e6..58f931375 100644
--- a/csharp/test/Drivers/Apache/Common/NumericValueTests.cs
+++ b/csharp/test/Drivers/Apache/Common/NumericValueTests.cs
@@ -18,7 +18,6 @@
 using System.Data.SqlTypes;
 using System.Threading.Tasks;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Xunit;
 using Xunit.Abstractions;
 
@@ -29,7 +28,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
 
     public abstract class NumericValueTests<TConfig, TEnv> : TestBase<TConfig, 
TEnv>
         where TConfig : TestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         /// <summary>
         /// Validates that specific numeric values can be inserted, retrieved 
and targeted correctly
@@ -121,23 +120,23 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
             await ValidateInsertSelectDeleteSingleValueAsync(table.TableName, 
columnName, expectedValue, $"CAST({expectedValue} as {columnType})");
         }
 
-        /// <summary>
-        /// Validates if driver correctly errors out when the values exceed 
the column's limit
-        /// </summary>
-        [SkippableTheory]
-        [InlineData(-100)]
-        [InlineData(100)]
-        [InlineData(int.MaxValue)]
-        [InlineData(int.MinValue)]
-        public async Task TestSmallNumberRangeOverlimit(int value)
-        {
-            string columnName = "SMALLNUMBER";
-            using TemporaryTable table = await 
NewTemporaryTableAsync(Statement, string.Format("{0} DECIMAL(2,0)", 
columnName));
-            await Assert.ThrowsAsync<HiveServer2Exception>(
-                async () => await ValidateInsertSelectDeleteSingleValueAsync(
-                    table.TableName,
-                    columnName, 
TestEnvironment.GetValueForProtocolVersion(value.ToString(), new 
SqlDecimal(value))));
-        }
+        ///// <summary>
+        ///// Validates if driver correctly errors out when the values exceed 
the column's limit
+        ///// </summary>
+        //[SkippableTheory]
+        //[InlineData(-100)]
+        //[InlineData(100)]
+        //[InlineData(int.MaxValue)]
+        //[InlineData(int.MinValue)]
+        //public async Task TestSmallNumberRangeOverlimit(int value)
+        //{
+        //    string columnName = "SMALLNUMBER";
+        //    using TemporaryTable table = await 
NewTemporaryTableAsync(Statement, string.Format("{0} DECIMAL(2,0)", 
columnName));
+        //    await Assert.ThrowsAsync<HiveServer2Exception>(
+        //        async () => await ValidateInsertSelectDeleteSingleValueAsync(
+        //            table.TableName,
+        //            columnName, 
TestEnvironment.GetValueForProtocolVersion(value.ToString(), new 
SqlDecimal(value))));
+        //}
 
         /// <summary>
         /// Validates if driver can handle a large scale Number type correctly
@@ -155,20 +154,20 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
             await ValidateInsertSelectDeleteSingleValueAsync(table.TableName, 
columnName, TestEnvironment.GetValueForProtocolVersion(value, new 
SqlDecimal(double.Parse(value))));
         }
 
-        /// <summary>
-        /// Validates if driver can error handle when input goes beyond a 
large scale Number type
-        /// </summary>
-        [SkippableTheory]
-        [InlineData("-10")]
-        [InlineData("10")]
-        [InlineData("99999999999999999999999999999999999999")]
-        [InlineData("-99999999999999999999999999999999999999")]
-        public async Task TestLargeScaleNumberOverlimit(string value)
-        {
-            string columnName = "LARGESCALENUMBER";
-            using TemporaryTable table = await 
NewTemporaryTableAsync(Statement, string.Format("{0} DECIMAL(38,37)", 
columnName));
-            await Assert.ThrowsAsync<HiveServer2Exception>(async () => await 
ValidateInsertSelectDeleteSingleValueAsync(table.TableName, columnName, 
SqlDecimal.Parse(value)));
-        }
+        ///// <summary>
+        ///// Validates if driver can error handle when input goes beyond a 
large scale Number type
+        ///// </summary>
+        //[SkippableTheory]
+        //[InlineData("-10")]
+        //[InlineData("10")]
+        //[InlineData("99999999999999999999999999999999999999")]
+        //[InlineData("-99999999999999999999999999999999999999")]
+        //public async Task TestLargeScaleNumberOverlimit(string value)
+        //{
+        //    string columnName = "LARGESCALENUMBER";
+        //    using TemporaryTable table = await 
NewTemporaryTableAsync(Statement, string.Format("{0} DECIMAL(1,0)", 
columnName));
+        //    await Assert.ThrowsAsync<HiveServer2Exception>(async () => await 
ValidateInsertSelectDeleteSingleValueAsync(table.TableName, columnName, 
SqlDecimal.Parse(value)));
+        //}
 
         /// <summary>
         /// Validates if driver can handle a small scale Number type correctly
@@ -185,18 +184,18 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
             await ValidateInsertSelectDeleteSingleValueAsync(table.TableName, 
columnName, TestEnvironment.GetValueForProtocolVersion(value, 
SqlDecimal.Parse(value)));
         }
 
-        /// <summary>
-        /// Validates if driver can error handle when an insert goes beyond a 
small scale Number type correctly
-        /// </summary>
-        [SkippableTheory]
-        [InlineData("-99999999999999999999999999999999999999")]
-        [InlineData("99999999999999999999999999999999999999")]
-        public async Task TestSmallScaleNumberOverlimit(string value)
-        {
-            string columnName = "SMALLSCALENUMBER";
-            using TemporaryTable table = await 
NewTemporaryTableAsync(Statement, string.Format("{0} DECIMAL(38,2)", 
columnName));
-            await Assert.ThrowsAsync<HiveServer2Exception>(async () => await 
ValidateInsertSelectDeleteSingleValueAsync(table.TableName, columnName, 
SqlDecimal.Parse(value)));
-        }
+        ///// <summary>
+        ///// Validates if driver can error handle when an insert goes beyond 
a small scale Number type correctly
+        ///// </summary>
+        //[SkippableTheory]
+        //[InlineData("-99999999999999999999999999999999999999")]
+        //[InlineData("99999999999999999999999999999999999999")]
+        //public async Task TestSmallScaleNumberOverlimit(string value)
+        //{
+        //    string columnName = "SMALLSCALENUMBER";
+        //    using TemporaryTable table = await 
NewTemporaryTableAsync(Statement, string.Format("{0} DECIMAL(38,2)", 
columnName));
+        //    await Assert.ThrowsAsync<HiveServer2Exception>(async () => await 
ValidateInsertSelectDeleteSingleValueAsync(table.TableName, columnName, 
SqlDecimal.Parse(value)));
+        //}
 
         /// <summary>
         /// Tests that decimals are rounded as expected.
diff --git a/csharp/test/Drivers/Apache/Common/StatementTests.cs 
b/csharp/test/Drivers/Apache/Common/StatementTests.cs
index b5636adc4..0af9e3f39 100644
--- a/csharp/test/Drivers/Apache/Common/StatementTests.cs
+++ b/csharp/test/Drivers/Apache/Common/StatementTests.cs
@@ -19,7 +19,6 @@ using System;
 using System.Collections.Generic;
 using System.Threading.Tasks;
 using Apache.Arrow.Adbc.Drivers.Apache;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Apache.Arrow.Adbc.Tests.Xunit;
 using Xunit;
 using Xunit.Abstractions;
@@ -36,7 +35,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
     [TestCaseOrderer("Apache.Arrow.Adbc.Tests.Xunit.TestOrderer", 
"Apache.Arrow.Adbc.Tests")]
     public abstract class StatementTests<TConfig, TEnv> : TestBase<TConfig, 
TEnv>
         where TConfig : ApacheTestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         private static List<string> DefaultTableTypes => ["TABLE", "VIEW"];
 
diff --git a/csharp/test/Drivers/Apache/Common/StringValueTests.cs 
b/csharp/test/Drivers/Apache/Common/StringValueTests.cs
index c83f7b90b..596fa010a 100644
--- a/csharp/test/Drivers/Apache/Common/StringValueTests.cs
+++ b/csharp/test/Drivers/Apache/Common/StringValueTests.cs
@@ -19,7 +19,6 @@ using System;
 using System.Collections.Generic;
 using System.Threading.Tasks;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
 using Xunit;
 using Xunit.Abstractions;
 
@@ -33,7 +32,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
     /// </summary>
     public abstract class StringValueTests<TConfig, TEnv> : TestBase<TConfig, 
TEnv>
         where TConfig : TestConfiguration
-        where TEnv : HiveServer2TestEnvironment<TConfig>
+        where TEnv : CommonTestEnvironment<TConfig>
     {
         public StringValueTests(ITestOutputHelper output, 
TestEnvironment<TConfig>.Factory<TEnv> testEnvFactory)
             : base(output, testEnvFactory) { }
@@ -112,6 +111,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Common
             string columnName = "VARCHARTYPE";
             using TemporaryTable table = await 
NewTemporaryTableAsync(Statement, string.Format("{0} {1}", columnName, 
"VARCHAR(10)"));
             AdbcException exception = await 
Assert.ThrowsAsync<HiveServer2Exception>(async () => await 
ValidateInsertSelectDeleteSingleValueAsync(
+                GetSelectSingleValueStatement(table.TableName, columnName, 
value.Substring(0, 10)),
                 table.TableName,
                 columnName,
                 value,
diff --git a/csharp/test/Drivers/Apache/Hive2/BinaryBooleanValueTests.cs 
b/csharp/test/Drivers/Apache/Hive2/BinaryBooleanValueTests.cs
new file mode 100644
index 000000000..315cf8f49
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Hive2/BinaryBooleanValueTests.cs
@@ -0,0 +1,66 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System.Text;
+using System.Threading.Tasks;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
+{
+    public class BinaryBooleanValueTests : 
Common.BinaryBooleanValueTests<ApacheTestConfiguration, 
HiveServer2TestEnvironment>
+    {
+        public BinaryBooleanValueTests(ITestOutputHelper output)
+            : base(output, new HiveServer2TestEnvironment.Factory())
+        {
+        }
+
+        [SkippableTheory]
+        [InlineData(null)]
+        [MemberData(nameof(AsciiArrayData), 0)]
+        [MemberData(nameof(AsciiArrayData), 2)]
+        [MemberData(nameof(AsciiArrayData), 1024)]
+        public override Task TestBinaryData(byte[]? value)
+        {
+            return base.TestBinaryData(value);
+        }
+
+        [SkippableTheory]
+        [InlineData("NULL")]
+        [InlineData("CAST(NULL AS INT)")]
+        [InlineData("CAST(NULL AS BIGINT)")]
+        [InlineData("CAST(NULL AS SMALLINT)")]
+        [InlineData("CAST(NULL AS TINYINT)")]
+        [InlineData("CAST(NULL AS FLOAT)")]
+        [InlineData("CAST(NULL AS DOUBLE)")]
+        [InlineData("CAST(NULL AS DECIMAL(38,0))")]
+        [InlineData("CAST(NULL AS STRING)")]
+        [InlineData("CAST(NULL AS VARCHAR(10))")]
+        [InlineData("CAST(NULL AS CHAR(10))")]
+        [InlineData("CAST(NULL AS BOOLEAN)")]
+        [InlineData("CAST(NULL AS BINARY)")]
+        public override Task TestNullData(string projectionClause)
+        {
+            return base.TestNullData(projectionClause);
+        }
+
+        protected override string? GetFormattedBinaryValue(byte[]? value)
+        {
+            return value != null ? $"CAST ('{Encoding.UTF8.GetString(value)}' 
as BINARY)" : null;
+        }
+    }
+}
diff --git a/csharp/test/Drivers/Apache/Hive2/ClientTests.cs 
b/csharp/test/Drivers/Apache/Hive2/ClientTests.cs
new file mode 100644
index 000000000..16a2d1069
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Hive2/ClientTests.cs
@@ -0,0 +1,48 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System.Collections.Generic;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
+{
+    public class ClientTests : Common.ClientTests<ApacheTestConfiguration, 
HiveServer2TestEnvironment>
+    {
+        public ClientTests(ITestOutputHelper? outputHelper)
+            : base(outputHelper, new HiveServer2TestEnvironment.Factory())
+        {
+        }
+
+        protected override IReadOnlyList<int> GetUpdateExpectedResults()
+        {
+            int affectedRows = ValidateAffectedRows ? 1 : -1;
+            return GetUpdateExpectedResults(affectedRows);
+        }
+
+        internal static IReadOnlyList<int> GetUpdateExpectedResults(int 
affectedRows)
+        {
+            return
+                [
+                    -1, // DROP TABLE
+                    -1, // CREATE TABLE
+                    affectedRows,  // INSERT
+                    affectedRows,  // INSERT
+                    affectedRows,  // INSERT
+                ];
+        }
+    }
+}
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs 
b/csharp/test/Drivers/Apache/Hive2/DateTimeValueTests.cs
similarity index 57%
copy from csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
copy to csharp/test/Drivers/Apache/Hive2/DateTimeValueTests.cs
index 9c03a3308..33e70193a 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
+++ b/csharp/test/Drivers/Apache/Hive2/DateTimeValueTests.cs
@@ -15,24 +15,24 @@
 * limitations under the License.
 */
 
-using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
+using System;
+using System.Globalization;
+using System.Threading.Tasks;
+using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Xunit;
+using Xunit.Abstractions;
 
-namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
 {
-    internal class ImpalaStatement : HiveServer2Statement
+    public class DateTimeValueTests : 
Common.DateTimeValueTests<ApacheTestConfiguration, HiveServer2TestEnvironment>
     {
-        internal ImpalaStatement(ImpalaConnection connection)
-            : base(connection)
-        {
-            ValidateOptions(connection.Properties);
-        }
+        public DateTimeValueTests(ITestOutputHelper output)
+            : base(output, new HiveServer2TestEnvironment.Factory())
+        { }
 
-        /// <summary>
-        /// Provides the constant string key values to the <see 
cref="AdbcStatement.SetOption(string, string)" /> method.
-        /// </summary>
-        public sealed class Options : ApacheParameters
+        protected override string GetFormattedTimestampValue(string value)
         {
-            // options specific to Impala go here
+            return "TO_TIMESTAMP(" + QuoteValue(value) + ")";
         }
     }
 }
diff --git a/csharp/test/Drivers/Apache/Hive2/DriverTests.cs 
b/csharp/test/Drivers/Apache/Hive2/DriverTests.cs
new file mode 100644
index 000000000..df915c0ca
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Hive2/DriverTests.cs
@@ -0,0 +1,152 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
+{
+    public class DriverTests : Common.DriverTests<ApacheTestConfiguration, 
HiveServer2TestEnvironment>
+    {
+        public DriverTests(ITestOutputHelper? outputHelper)
+            : base(outputHelper, new HiveServer2TestEnvironment.Factory())
+        {
+        }
+
+        [SkippableTheory]
+        [MemberData(nameof(CatalogNamePatternData))]
+        public override void CanGetObjectsCatalogs(string? pattern)
+        {
+            GetObjectsCatalogsTest(pattern);
+        }
+
+        [SkippableTheory]
+        [MemberData(nameof(DbSchemasNamePatternData))]
+        public override void CanGetObjectsDbSchemas(string dbSchemaPattern)
+        {
+            GetObjectsDbSchemasTest(dbSchemaPattern);
+        }
+
+        [SkippableTheory]
+        [MemberData(nameof(TableNamePatternData))]
+        public override void CanGetObjectsTables(string tableNamePattern)
+        {
+            GetObjectsTablesTest(tableNamePattern);
+        }
+
+        public override void CanDetectInvalidServer()
+        {
+            AdbcDriver driver = NewDriver;
+            Assert.NotNull(driver);
+            Dictionary<string, string> parameters = 
GetDriverParameters(TestConfiguration);
+
+            bool hasUri = parameters.TryGetValue(AdbcOptions.Uri, out var uri) 
&& !string.IsNullOrEmpty(uri);
+            bool hasHostName = 
parameters.TryGetValue(HiveServer2Parameters.HostName, out var hostName) && 
!string.IsNullOrEmpty(hostName);
+            if (hasUri)
+            {
+                parameters[AdbcOptions.Uri] = 
"http://unknownhost.azure.com/cliservice";;
+            }
+            else if (hasHostName)
+            {
+                parameters[HiveServer2Parameters.HostName] = 
"unknownhost.azure.com";
+            }
+            else
+            {
+                Assert.Fail($"Unexpected configuration. Must provide 
'{AdbcOptions.Uri}' or '{HiveServer2Parameters.HostName}'.");
+            }
+
+            AdbcDatabase database = driver.Open(parameters);
+            AggregateException exception = 
Assert.ThrowsAny<AggregateException>(() => database.Connect(parameters));
+            OutputHelper?.WriteLine(exception.Message);
+        }
+
+        public override void CanDetectInvalidAuthentication()
+        {
+            AdbcDriver driver = NewDriver;
+            Assert.NotNull(driver);
+            Dictionary<string, string> parameters = 
GetDriverParameters(TestConfiguration);
+
+            bool hasUsername = parameters.TryGetValue(AdbcOptions.Username, 
out var username) && !string.IsNullOrEmpty(username);
+            bool hasPassword = parameters.TryGetValue(AdbcOptions.Password, 
out var password) && !string.IsNullOrEmpty(password);
+            if (hasUsername && hasPassword)
+            {
+                parameters[AdbcOptions.Password] = "invalid-password";
+            }
+            else
+            {
+                Assert.Fail($"Unexpected configuration. Must provide 
'{AdbcOptions.Username}' and '{AdbcOptions.Password}'.");
+            }
+
+            AdbcDatabase database = driver.Open(parameters);
+            AggregateException exception = 
Assert.ThrowsAny<AggregateException>(() => database.Connect(parameters));
+            OutputHelper?.WriteLine(exception.Message);
+        }
+
+        protected override IReadOnlyList<int> GetUpdateExpectedResults()
+        {
+            int affectedRows = ValidateAffectedRows ? 1 : -1;
+            return ClientTests.GetUpdateExpectedResults(affectedRows);
+        }
+
+        public static IEnumerable<object[]> CatalogNamePatternData()
+        {
+            string? catalogName = new 
DriverTests(null).TestConfiguration?.Metadata?.Catalog;
+            return GetPatterns(catalogName);
+        }
+
+        public static IEnumerable<object[]> DbSchemasNamePatternData()
+        {
+            string? dbSchemaName = new 
DriverTests(null).TestConfiguration?.Metadata?.Schema;
+            return GetPatterns(dbSchemaName);
+        }
+
+        public static IEnumerable<object[]> TableNamePatternData()
+        {
+            string? tableName = new 
DriverTests(null).TestConfiguration?.Metadata?.Table;
+            return GetPatterns(tableName);
+        }
+
+        protected override bool TypeHasColumnSize(Metadata.AdbcColumn column)
+        {
+            switch (column.XdbcDataType!.Value)
+            {
+                case (short)SupportedDriverDataType.DECIMAL:
+                case (short)SupportedDriverDataType.NUMERIC:
+                case (short)SupportedDriverDataType.CHAR:
+                case (short)SupportedDriverDataType.VARCHAR:
+                    return true;
+                default:
+                    return false;
+            }
+        }
+
+        protected override bool TypeHasDecimalDigits(Metadata.AdbcColumn 
column)
+        {
+            switch (column.XdbcDataType!.Value)
+            {
+                case (short)SupportedDriverDataType.DECIMAL:
+                case (short)SupportedDriverDataType.NUMERIC:
+                    return true;
+                default:
+                    return false;
+            }
+        }
+    }
+}
diff --git a/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs 
b/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
index f141b293c..79829ae40 100644
--- a/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
@@ -16,25 +16,209 @@
 */
 
 using System;
+using System.Collections.Generic;
+using System.Data.SqlTypes;
+using System.Text;
+using Apache.Arrow.Adbc.Drivers.Apache;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
+using Apache.Arrow.Adbc.Tests.Drivers.Apache.Common;
+using Apache.Arrow.Types;
 
 namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
 {
-    public abstract class HiveServer2TestEnvironment<TConfig> : 
TestEnvironment<TConfig>
-        where TConfig : TestConfiguration
+    public class HiveServer2TestEnvironment : 
CommonTestEnvironment<ApacheTestConfiguration>
     {
-        public HiveServer2TestEnvironment(Func<AdbcConnection> getConnection)
-            : base(getConnection)
+        public class Factory : Factory<HiveServer2TestEnvironment>
         {
+            public override HiveServer2TestEnvironment 
Create(Func<AdbcConnection> getConnection) => new(getConnection);
         }
 
-        internal DataTypeConversion DataTypeConversion => 
((HiveServer2Connection)Connection).DataTypeConversion;
+        private HiveServer2TestEnvironment(Func<AdbcConnection> getConnection) 
: base(getConnection) { }
 
-        public string? GetValueForProtocolVersion(string? unconvertedValue, 
string? convertedValue) =>
-            
((HiveServer2Connection)Connection).DataTypeConversion.HasFlag(DataTypeConversion.None)
 ? unconvertedValue : convertedValue;
+        public override string TestConfigVariable => "HIVE_TEST_CONFIG_FILE";
 
-        public object? GetValueForProtocolVersion(object? unconvertedValue, 
object? convertedValue) =>
-            
((HiveServer2Connection)Connection).DataTypeConversion.HasFlag(DataTypeConversion.None)
 ? unconvertedValue : convertedValue;
+        public override string SqlDataResourceLocation => 
"Hive2/Resources/HiveData.sql";
 
+        public override int ExpectedColumnCount => 17;
+
+        public override AdbcDriver CreateNewDriver() => new 
HiveServer2Driver();
+
+        public override string GetCreateTemporaryTableStatement(string 
tableName, string columns)
+        {
+            return string.Format("CREATE TABLE {0} ({1})", tableName, columns);
+        }
+
+        public override string Delimiter => "`";
+
+        public override Dictionary<string, string> 
GetDriverParameters(ApacheTestConfiguration testConfiguration)
+        {
+            Dictionary<string, string> parameters = 
new(StringComparer.OrdinalIgnoreCase);
+
+            if (!string.IsNullOrEmpty(testConfiguration.HostName))
+            {
+                parameters.Add(HiveServer2Parameters.HostName, 
testConfiguration.HostName!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.Uri))
+            {
+                parameters.Add(AdbcOptions.Uri, testConfiguration.Uri!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.Port))
+            {
+                parameters.Add(HiveServer2Parameters.Port, 
testConfiguration.Port!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.Path))
+            {
+                parameters.Add(HiveServer2Parameters.Path, 
testConfiguration.Path!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.Username))
+            {
+                parameters.Add(AdbcOptions.Username, 
testConfiguration.Username!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.Password))
+            {
+                parameters.Add(AdbcOptions.Password, 
testConfiguration.Password!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.AuthType))
+            {
+                parameters.Add(HiveServer2Parameters.AuthType, 
testConfiguration.AuthType!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.Type))
+            {
+                parameters.Add(HiveServer2Parameters.TransportType, 
testConfiguration.Type!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.DataTypeConversion))
+            {
+                parameters.Add(HiveServer2Parameters.DataTypeConv, 
testConfiguration.DataTypeConversion!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.TlsOptions))
+            {
+                parameters.Add(HiveServer2Parameters.TLSOptions, 
testConfiguration.TlsOptions!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.BatchSize))
+            {
+                parameters.Add(ApacheParameters.BatchSize, 
testConfiguration.BatchSize!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.PollTimeMilliseconds))
+            {
+                parameters.Add(ApacheParameters.PollTimeMilliseconds, 
testConfiguration.PollTimeMilliseconds!);
+            }
+            if 
(!string.IsNullOrEmpty(testConfiguration.ConnectTimeoutMilliseconds))
+            {
+                
parameters.Add(HiveServer2Parameters.ConnectTimeoutMilliseconds, 
testConfiguration.ConnectTimeoutMilliseconds!);
+            }
+            if (!string.IsNullOrEmpty(testConfiguration.QueryTimeoutSeconds))
+            {
+                parameters.Add(ApacheParameters.QueryTimeoutSeconds, 
testConfiguration.QueryTimeoutSeconds!);
+            }
+            return parameters;
+        }
+
+        public override string VendorVersion => 
((HiveServer2Connection)Connection).VendorVersion;
+
+        public override bool SupportsDelete => false;
+
+        public override bool SupportsUpdate => false;
+
+        public override bool SupportCatalogName => false;
+
+        public override bool ValidateAffectedRows => false;
+
+        public override string GetInsertStatement(string tableName, string 
columnName, string? value) =>
+            string.Format("INSERT INTO {0} ({1}) SELECT {2}", tableName, 
columnName, value ?? "NULL");
+
+        /// <summary>
+        /// Get a <see cref="SampleDataBuilder"/> for data source specific
+        /// data types.
+        /// </summary>
+        /// <returns></returns>
+        /// <remarks>
+        /// </remarks>
+        public override SampleDataBuilder GetSampleDataBuilder()
+        {
+            SampleDataBuilder sampleDataBuilder = new();
+            bool dataTypeIsFloat = 
DataTypeConversion.HasFlag(DataTypeConversion.Scalar);
+            Type floatNetType = dataTypeIsFloat ? typeof(float) : 
typeof(double);
+            Type floatArrowType = dataTypeIsFloat ? typeof(FloatType) : 
typeof(DoubleType);
+            object floatValue;
+            if (dataTypeIsFloat)
+                floatValue = 1f;
+            else
+                floatValue = 1d;
+
+            // standard values
+            sampleDataBuilder.Samples.Add(
+                new SampleData()
+                {
+                    Query = "SELECT " +
+                            "CAST(1 as BIGINT) as id, " +
+                            "CAST(2 as INTEGER) as intcol, " +
+                            "CAST(1 as FLOAT) as number_float, " +
+                            "CAST(4.56 as DOUBLE) as number_double, " +
+                            "CAST(4.56 as DECIMAL(3,2)) as decimalcol, " +
+                            "CAST(9.9999999999999999999999999999999999999 as 
DECIMAL(38,37)) as big_decimal, " +
+                            "CAST(True as BOOLEAN) as is_active, " +
+                            "'John Doe' as name, " +
+                            "CAST('abc123' as BINARY) as datacol, " +
+                            "DATE '2023-09-08' as datecol, " +
+                            "CAST('2023-09-08 12:34:56+00:00' as TIMESTAMP) as 
timestampcol, " +
+                            "CAST('2023-09-08 12:34:56+00:00' as TIMESTAMP) + 
INTERVAL 20 YEARS as intervalcol, " +
+                            "ARRAY(1, 2, 3) as numbers, " +
+                            "named_struct('name', 'John Doe', 'age', 30) as 
person, " +
+                            "map('name', CAST('Jane Doe' AS STRING), 'age', 
CAST(29 AS INT)) as personmap"
+                            ,
+                    ExpectedValues =
+                    [
+                        new("id", typeof(long), typeof(Int64Type), 1L),
+                        new("intcol", typeof(int), typeof(Int32Type), 2),
+                        new("number_float", typeof(float), typeof(FloatType), 
1f),
+                        new("number_double", typeof(double), 
typeof(DoubleType), 4.56),
+                        new("decimalcol", typeof(SqlDecimal), 
typeof(Decimal128Type), SqlDecimal.Parse("4.56")),
+                        new("big_decimal", typeof(SqlDecimal), 
typeof(Decimal128Type), 
SqlDecimal.Parse("9.9999999999999999999999999999999999999")),
+                        new("is_active", typeof(bool), typeof(BooleanType), 
true),
+                        new("name", typeof(string), typeof(StringType), "John 
Doe"),
+                        new("datacol", typeof(byte[]), typeof(BinaryType), 
UTF8Encoding.UTF8.GetBytes("abc123")),
+                        new("datecol", typeof(DateTime), typeof(Date32Type), 
new DateTime(2023, 9, 8)),
+                        new("timestampcol", typeof(DateTimeOffset), 
typeof(TimestampType), new DateTimeOffset(new DateTime(2023, 9, 8, 12, 34, 56), 
TimeSpan.Zero)),
+                        new("intervalcol", typeof(DateTimeOffset), 
typeof(TimestampType), new DateTimeOffset(new DateTime(2043, 9, 8, 12, 34, 56), 
TimeSpan.Zero)),
+                        new("numbers", typeof(string), typeof(StringType), 
"[1,2,3]"),
+                        new("person", typeof(string), typeof(StringType), 
"""{"name":"John Doe","age":30}"""),
+                        new("personmap", typeof(string), typeof(StringType), 
"""{"name":"Jane Doe","age":"29"}""")
+                     ]
+                });
+
+            sampleDataBuilder.Samples.Add(
+                new SampleData()
+                {
+                    Query = "SELECT " +
+                            "CAST(NULL as BIGINT) as id, " +
+                            "CAST(NULL as INTEGER) as intcol, " +
+                            "CAST(NULL as FLOAT) as number_float, " +
+                            "CAST(NULL as DOUBLE) as number_double, " +
+                            "CAST(NULL as DECIMAL(3,2)) as decimalcol, " +
+                            "CAST(NULL as DECIMAL(38,37)) as big_decimal, " +
+                            "CAST(NULL as BOOLEAN) as is_active, " +
+                            "CAST(NULL as STRING) as name, " +
+                            "CAST(NULL as BINARY) as datacol, " +
+                            "CAST(NULL as DATE) as datecol, " +
+                            "CAST(NULL as TIMESTAMP) as timestampcol"
+                            ,
+                    ExpectedValues =
+                    [
+                        new("id", typeof(long), typeof(Int64Type), null),
+                        new("intcol", typeof(int), typeof(Int32Type), null),
+                        new("number_float", typeof(float), typeof(FloatType), 
null),
+                        new("number_double", typeof(double), 
typeof(DoubleType), null),
+                        new("decimalcol", typeof(SqlDecimal), 
typeof(Decimal128Type), null),
+                        new("big_decimal", typeof(SqlDecimal), 
typeof(Decimal128Type), null),
+                        new("is_active", typeof(bool), typeof(BooleanType), 
null),
+                        new("name", typeof(string), typeof(StringType), null),
+                        new("datacol", typeof(byte[]), typeof(BinaryType), 
null),
+                        new("datecol", typeof(DateTime), typeof(Date32Type), 
null),
+                        new("timestampcol", typeof(DateTimeOffset), 
typeof(TimestampType), null)
+                     ]
+                });
+
+            return sampleDataBuilder;
+        }
     }
 }
diff --git a/csharp/test/Drivers/Apache/Hive2/NumericValueTests.cs 
b/csharp/test/Drivers/Apache/Hive2/NumericValueTests.cs
new file mode 100644
index 000000000..4e18de856
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Hive2/NumericValueTests.cs
@@ -0,0 +1,63 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System.Threading.Tasks;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
+{
+    public class NumericValueTests : 
Common.NumericValueTests<ApacheTestConfiguration, HiveServer2TestEnvironment>
+    {
+        public NumericValueTests(ITestOutputHelper output)
+            : base(output, new HiveServer2TestEnvironment.Factory())
+        {
+        }
+
+        [SkippableTheory]
+        [InlineData(0)]
+        [InlineData(0.2)]
+        [InlineData(15e-03)]
+        [InlineData(1.234E+2)]
+        [InlineData(double.MaxValue)]
+        [InlineData(double.MinValue)]
+        // TODO: Solve server issue with selecting infinity.
+        //[InlineData(double.NegativeInfinity)]
+        //[InlineData(double.PositiveInfinity)]
+        public override Task TestDoubleValuesInsertSelectDelete(double value)
+        {
+            return base.TestDoubleValuesInsertSelectDelete(value);
+        }
+
+        [SkippableTheory]
+        [InlineData(0)]
+        [InlineData(25)]
+        [InlineData(25.1)]
+        [InlineData(0.2)]
+        [InlineData(15e-03)]
+        [InlineData(1.234E+2)]
+        [InlineData(float.MaxValue)]
+        // TODO: Solve server issue with selecting infinity.
+        //[InlineData(float.NegativeInfinity)]
+        //[InlineData(float.PositiveInfinity)]
+        //[InlineData(float.MinValue)]
+        public override Task TestFloatValuesInsertSelectDelete(float value)
+        {
+            return base.TestFloatValuesInsertSelectDelete(value);
+        }
+    }
+}
diff --git a/csharp/test/Drivers/Apache/Hive2/Resources/HiveData.sql 
b/csharp/test/Drivers/Apache/Hive2/Resources/HiveData.sql
new file mode 100644
index 000000000..6a65607c9
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Hive2/Resources/HiveData.sql
@@ -0,0 +1,105 @@
+
+ -- Licensed to the Apache Software Foundation (ASF) under one or more
+ -- contributor license agreements.  See the NOTICE file distributed with
+ -- this work for additional information regarding copyright ownership.
+ -- The ASF licenses this file to You under the Apache License, Version 2.0
+ -- (the "License"); you may not use this file except in compliance with
+ -- the License.  You may obtain a copy of the License at
+
+ --    http://www.apache.org/licenses/LICENSE-2.0
+
+ -- Unless required by applicable law or agreed to in writing, software
+ -- distributed under the License is distributed on an "AS IS" BASIS,
+ -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ -- See the License for the specific language governing permissions and
+ -- limitations under the License.
+DROP TABLE IF EXISTS {ADBC_CATALOG}.{ADBC_DATASET}.{ADBC_TABLE};
+
+CREATE TABLE IF NOT EXISTS {ADBC_CATALOG}.{ADBC_DATASET}.{ADBC_TABLE} (
+  longCol BIGINT,
+  byteCol TINYINT,
+  shortCol SMALLINT,
+  integerCol INT,
+  floatCol FLOAT,
+  doubleCol DOUBLE,
+  decimalCol DECIMAL(38, 9),
+  boolCol BOOLEAN,
+  stringCol STRING,
+  binaryCol BINARY,
+  dateCol DATE,
+  timestampCol TIMESTAMP,
+  arrayCol ARRAY<INT>,
+  structCol STRUCT<
+    col1: STRING,
+    col2: INT
+  >,
+  mapCol MAP<
+    INT,
+    STRING
+  >,
+  varcharCol STRING,
+  charCol CHAR(10)
+);
+
+INSERT INTO {ADBC_CATALOG}.{ADBC_DATASET}.{ADBC_TABLE}
+    SELECT
+    1,
+    2,
+    3,
+    4,
+    7.89,
+    1.23,
+    4.56,
+    TRUE,
+    'John Doe',
+    UNHEX('616263313233'),
+    to_date('2023-09-08'),
+    unix_timestamp('1970-01-01 00:00:00', 'yyyy-MM-dd HH:mm:ss'),
+    ARRAY(1, 2, 3),
+    STRUCT('John Doe', 30),
+    MAP(1, 'John Doe'),
+    'John Doe',
+    'John Doe'
+;
+
+INSERT INTO {ADBC_CATALOG}.{ADBC_DATASET}.{ADBC_TABLE}
+    SELECT
+    2,
+    127,
+    32767,
+    2147483647,
+    3.4028234663852886e+38,
+    1.7976931348623157e+308,
+    CAST(9.99999999999999999999999999999999E+28 AS DECIMAL(38, 9)),
+    FALSE,
+    'Jane Doe',
+    UNHEX('646566343536'),
+    to_date('2023-09-09'),
+    unix_timestamp('1970-01-01 00:00:00', 'yyyy-MM-dd HH:mm:ss'),
+    ARRAY(4, 5, 6),
+    STRUCT('Jane Doe', 40),
+    MAP(1, 'John Doe'),
+    'Jane Doe',
+    'Jane Doe'
+;
+
+INSERT INTO {ADBC_CATALOG}.{ADBC_DATASET}.{ADBC_TABLE}
+    SELECT
+    3,
+    -128,
+    -32768,
+    -2147483648,
+    -3.4028234663852886e+38,
+    -1.7976931348623157e+308,
+    CAST(-9.99999999999999999999999999999999E+28 AS DECIMAL(38, 9)),
+    FALSE,
+    'Jack Doe',
+    UNHEX('646566343536'),
+    to_date('1556-01-02'),
+    unix_timestamp('1970-01-01 00:00:00', 'yyyy-MM-dd HH:mm:ss'),
+    ARRAY(7, 8, 9),
+    STRUCT('Jack Doe', 50),
+    MAP(1, 'John Doe'),
+    'Jack Doe',
+    'Jack Doe'
+;
diff --git a/csharp/test/Drivers/Apache/Hive2/Resources/hiveconfig-http.json 
b/csharp/test/Drivers/Apache/Hive2/Resources/hiveconfig-http.json
new file mode 100644
index 000000000..7f8e69b1e
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Hive2/Resources/hiveconfig-http.json
@@ -0,0 +1,15 @@
+{
+    "uri": "http://<hostName>:10000/cliservice",
+    "auth_type": "basic",
+    "username": "<username>",
+    "password": "<password>",
+    "type": "http",
+    "data_type_conv": "none",
+    "query": "",
+    "expectedResults": 0,
+    "metadata": {
+        "schema": "default",
+        "table": "<tableName>",
+        "expectedColumnCount": 17
+    }
+}
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs 
b/csharp/test/Drivers/Apache/Hive2/StatementTests.cs
similarity index 58%
copy from csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
copy to csharp/test/Drivers/Apache/Hive2/StatementTests.cs
index 9c03a3308..808b917eb 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaStatement.cs
+++ b/csharp/test/Drivers/Apache/Hive2/StatementTests.cs
@@ -15,24 +15,17 @@
 * limitations under the License.
 */
 
-using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
+using Apache.Arrow.Adbc.Tests.Drivers.Apache.Common;
+using Xunit;
+using Xunit.Abstractions;
 
-namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
 {
-    internal class ImpalaStatement : HiveServer2Statement
+    public class StatementTests : 
Common.StatementTests<ApacheTestConfiguration, HiveServer2TestEnvironment>
     {
-        internal ImpalaStatement(ImpalaConnection connection)
-            : base(connection)
+        public StatementTests(ITestOutputHelper? outputHelper)
+            : base(outputHelper, new HiveServer2TestEnvironment.Factory())
         {
-            ValidateOptions(connection.Properties);
-        }
-
-        /// <summary>
-        /// Provides the constant string key values to the <see 
cref="AdbcStatement.SetOption(string, string)" /> method.
-        /// </summary>
-        public sealed class Options : ApacheParameters
-        {
-            // options specific to Impala go here
         }
     }
 }
diff --git a/csharp/test/Drivers/Apache/Hive2/StringValueTests.cs 
b/csharp/test/Drivers/Apache/Hive2/StringValueTests.cs
new file mode 100644
index 000000000..d84bfd67e
--- /dev/null
+++ b/csharp/test/Drivers/Apache/Hive2/StringValueTests.cs
@@ -0,0 +1,43 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System.Threading.Tasks;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
+{
+    public class StringValueTests(ITestOutputHelper output)
+        : Common.StringValueTests<ApacheTestConfiguration, 
HiveServer2TestEnvironment>(output, new HiveServer2TestEnvironment.Factory())
+    {
+        [SkippableTheory]
+        [InlineData("String whose length is too long for VARCHAR(10).", new 
string[] { "Error while compiling statement", "ParseException", "extraneous 
input 'who' expecting EOF near '<EOF>'" }, "42000")]
+        protected async Task TestVarcharExceptionDataImpala(string value, 
string[] expectedTexts, string? expectedSqlState)
+        {
+            await base.TestVarcharExceptionData(value, expectedTexts, 
expectedSqlState);
+        }
+
+        [SkippableTheory]
+        [InlineData(null)]
+        [InlineData("")]
+        [InlineData(" Leading and trailing spaces ")]
+        internal override Task TestCharData(string? value)
+        {
+            return base.TestCharData(value);
+        }
+    }
+}
diff --git a/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs 
b/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs
index 9b3573f69..4cb763bfb 100644
--- a/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs
@@ -22,12 +22,12 @@ using System.Text;
 using Apache.Arrow.Adbc.Drivers.Apache;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
 using Apache.Arrow.Adbc.Drivers.Apache.Impala;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
+using Apache.Arrow.Adbc.Tests.Drivers.Apache.Common;
 using Apache.Arrow.Types;
 
 namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Impala
 {
-    public class ImpalaTestEnvironment : 
HiveServer2TestEnvironment<ApacheTestConfiguration>
+    public class ImpalaTestEnvironment : 
CommonTestEnvironment<ApacheTestConfiguration>
     {
         public class Factory : Factory<ImpalaTestEnvironment>
         {
diff --git a/csharp/test/Drivers/Apache/Spark/SparkConnectionTest.cs 
b/csharp/test/Drivers/Apache/Spark/SparkConnectionTest.cs
index 34e971bd8..417885e0b 100644
--- a/csharp/test/Drivers/Apache/Spark/SparkConnectionTest.cs
+++ b/csharp/test/Drivers/Apache/Spark/SparkConnectionTest.cs
@@ -293,7 +293,7 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
             public InvalidConnectionParametersTestData()
             {
                 Add(new([], typeof(ArgumentException)));
-                Add(new(new() { [SparkParameters.Type] = " " }, 
typeof(ArgumentException)));
+                Add(new(new() { [SparkParameters.Type] = " " }, 
typeof(ArgumentOutOfRangeException)));
                 Add(new(new() { [SparkParameters.Type] = "xxx" }, 
typeof(ArgumentOutOfRangeException)));
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Standard }, typeof(ArgumentOutOfRangeException)));
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Http, [SparkParameters.HostName] = " " }, 
typeof(ArgumentException)));
@@ -310,12 +310,13 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.HostName] = 
"valid.server.com", [SparkParameters.Token] = "abcdef", [SparkParameters.Port] 
= "-1" }, typeof(ArgumentOutOfRangeException)));
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.HostName] = 
"valid.server.com", [SparkParameters.Token] = "abcdef", [SparkParameters.Port] 
= IPEndPoint.MinPort.ToString(CultureInfo.InvariantCulture) }, 
typeof(ArgumentOutOfRangeException)));
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.HostName] = 
"valid.server.com", [SparkParameters.Token] = "abcdef", [SparkParameters.Port] 
= (IPEndPoint.MaxPort + 1).ToString(CultureInfo.InvariantCulture) }, 
typeof(ArgumentOutOfRangeException)));
-                Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.HostName] = 
"valid.server.com", [SparkParameters.Token] = "abcdef", [AdbcOptions.Uri] = 
"httpxxz://hostname.com" }, typeof(ArgumentOutOfRangeException)));
-                Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.HostName] = 
"valid.server.com", [SparkParameters.Token] = "abcdef", [AdbcOptions.Uri] = 
"http-//hostname.com" }, typeof(UriFormatException)));
-                Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.HostName] = 
"valid.server.com", [SparkParameters.Token] = "abcdef", [AdbcOptions.Uri] = 
"httpxxz://hostname.com:1234567890" }, typeof(UriFormatException)));
+                Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.Token] = "abcdef", 
[AdbcOptions.Uri] = "httpxxz://hostname.com" }, 
typeof(ArgumentOutOfRangeException)));
+                Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.Token] = "abcdef", 
[AdbcOptions.Uri] = "http-//hostname.com" }, typeof(ArgumentException)));
+                Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.Token] = "abcdef", 
[AdbcOptions.Uri] = "httpxxz://hostname.com:1234567890" }, 
typeof(ArgumentException)));
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Http, [SparkParameters.HostName] = "valid.server.com", 
[AdbcOptions.Username] = "user", [AdbcOptions.Password] = "myPassword", 
[SparkParameters.ConnectTimeoutMilliseconds] = ((long)int.MaxValue + 
1).ToString() }, typeof(ArgumentOutOfRangeException)));
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Http, [SparkParameters.HostName] = "valid.server.com", 
[AdbcOptions.Username] = "user", [AdbcOptions.Password] = "myPassword", 
[SparkParameters.ConnectTimeoutMilliseconds] = "non-numeric" }, 
typeof(ArgumentOutOfRangeException)));
                 Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Http, [SparkParameters.HostName] = "valid.server.com", 
[AdbcOptions.Username] = "user", [AdbcOptions.Password] = "myPassword", 
[SparkParameters.ConnectTimeoutMilliseconds] = "" }, 
typeof(ArgumentOutOfRangeException)));
+                Add(new(new() { [SparkParameters.Type] = 
SparkServerTypeConstants.Databricks, [SparkParameters.Token] = "abcdef", 
[SparkParameters.HostName] = "valid.server.com", [AdbcOptions.Uri] = 
"http://valid.hostname.com" }, typeof(ArgumentOutOfRangeException)));
             }
         }
     }
diff --git a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs 
b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
index 16a550111..ca0fb1fe7 100644
--- a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
@@ -22,12 +22,12 @@ using System.Text;
 using Apache.Arrow.Adbc.Drivers.Apache;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
 using Apache.Arrow.Adbc.Drivers.Apache.Spark;
-using Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2;
+using Apache.Arrow.Adbc.Tests.Drivers.Apache.Common;
 using Apache.Arrow.Types;
 
 namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
 {
-    public class SparkTestEnvironment : 
HiveServer2TestEnvironment<SparkTestConfiguration>
+    public class SparkTestEnvironment : 
CommonTestEnvironment<SparkTestConfiguration>
     {
         public class Factory : Factory<SparkTestEnvironment>
         {

Reply via email to