This is an automated email from the ASF dual-hosted git repository.
curth pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-adbc.git
The following commit(s) were added to refs/heads/main by this push:
new 148bc1752 feat(csharp/src/Drivers/Databricks): Integrate
ProxyConfigurations with Drivers (#2794)
148bc1752 is described below
commit 148bc175294fade563685e1ad8b732eb6046474d
Author: Todd Meng <[email protected]>
AuthorDate: Mon May 19 10:27:12 2025 -0700
feat(csharp/src/Drivers/Databricks): Integrate ProxyConfigurations with
Drivers (#2794)
Integrates proxy logic with the rest of the Spark driver. Also applies
to CloudFetch and the OAuth2 token fetch.
Other drivers, like the Impala driver, should be compatible with this
change. However, it has not been tested there, so I noted in the README
that proxy support for those drivers is not fully complete.
To test, add the following to `DATABRICKS_TEST_CONFIG_FILE` environment
variable:
```
"http_options": {
"proxy": {
"use_proxy": "true",
"proxy_host": "...",
"proxy_port":..,
"proxy_auth": "...", // if using basic auth
"proxy_uid": "...",
"proxy_pwd": "..."
},
"tls": { // you may need to enable depending on your proxy, or you need
to trust the cert
"enabled": ,
"allow_self_signed":
}
}
```
And inspect the proxy traffic. Verified that it works in driver testing
and Power BI testing.
---
.../Apache/Hive2/HiveServer2HttpConnection.cs | 5 +++-
.../Apache/Hive2/HiveServer2ProxyConfigurator.cs | 4 +--
.../src/Drivers/Apache/Hive2/HiveServer2TlsImpl.cs | 3 +-
csharp/src/Drivers/Apache/Hive2/README.md | 7 +++++
.../Drivers/Apache/Impala/ImpalaHttpConnection.cs | 5 +++-
.../Drivers/Apache/Spark/SparkHttpConnection.cs | 5 +++-
.../Auth/OAuthClientCredentialsProvider.cs | 4 ++-
.../CloudFetch/CloudFetchDownloadManager.cs | 9 ++----
.../Databricks/CloudFetch/CloudFetchReader.cs | 6 ++--
.../src/Drivers/Databricks/DatabricksConnection.cs | 7 ++++-
.../test/Drivers/Apache/ApacheTestConfiguration.cs | 27 +++++++++++++++++
.../Apache/Hive2/HiveServer2TestEnvironment.cs | 34 ++++++++++++++++++++++
.../Drivers/Apache/Spark/SparkTestEnvironment.cs | 34 ++++++++++++++++++++++
.../Auth/OAuthClientCredentialsProviderTests.cs | 10 +++----
.../Databricks/DatabricksTestEnvironment.cs | 34 ++++++++++++++++++++++
15 files changed, 172 insertions(+), 22 deletions(-)
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2HttpConnection.cs
b/csharp/src/Drivers/Apache/Hive2/HiveServer2HttpConnection.cs
index 0bba4735c..860696a5b 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2HttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2HttpConnection.cs
@@ -36,8 +36,11 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
{
private const string BasicAuthenticationScheme = "Basic";
+ private readonly HiveServer2ProxyConfigurator _proxyConfigurator;
+
public HiveServer2HttpConnection(IReadOnlyDictionary<string, string>
properties) : base(properties)
{
+ _proxyConfigurator =
HiveServer2ProxyConfigurator.FromProperties(properties);
}
protected override void ValidateAuthentication()
@@ -138,7 +141,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
Uri baseAddress = GetBaseAddress(uri, hostName, path, port,
HiveServer2Parameters.HostName, TlsOptions.IsTlsEnabled);
AuthenticationHeaderValue? authenticationHeaderValue =
GetAuthenticationHeaderValue(authTypeValue, username, password);
- HttpClientHandler httpClientHandler =
HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions);
+ HttpClientHandler httpClientHandler =
HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions, _proxyConfigurator);
httpClientHandler.AutomaticDecompression =
DecompressionMethods.GZip | DecompressionMethods.Deflate;
HttpClient httpClient = new(httpClientHandler);
httpClient.BaseAddress = baseAddress;
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2ProxyConfigurator.cs
b/csharp/src/Drivers/Apache/Hive2/HiveServer2ProxyConfigurator.cs
index a16efd7f2..acb43c527 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2ProxyConfigurator.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2ProxyConfigurator.cs
@@ -63,7 +63,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
throw new ArgumentNullException(nameof(proxyPort));
}
- if (proxyAuth)
+ if (useProxy && proxyAuth)
{
if (proxyUid == null)
throw new ArgumentNullException(nameof(proxyUid));
@@ -78,7 +78,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
_proxyUid = proxyUid;
_proxyPwd = proxyPwd;
- if (!string.IsNullOrEmpty(proxyIgnoreList))
+ if (useProxy && !string.IsNullOrEmpty(proxyIgnoreList))
{
_proxyBypassList = ParseProxyIgnoreList(proxyIgnoreList);
}
diff --git a/csharp/src/Drivers/Apache/Hive2/HiveServer2TlsImpl.cs
b/csharp/src/Drivers/Apache/Hive2/HiveServer2TlsImpl.cs
index b856d38d7..069267b1c 100644
--- a/csharp/src/Drivers/Apache/Hive2/HiveServer2TlsImpl.cs
+++ b/csharp/src/Drivers/Apache/Hive2/HiveServer2TlsImpl.cs
@@ -72,7 +72,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
return tlsProperties;
}
- static internal HttpClientHandler NewHttpClientHandler(TlsProperties
tlsProperties)
+ static internal HttpClientHandler NewHttpClientHandler(TlsProperties
tlsProperties, HiveServer2ProxyConfigurator proxyConfigurator)
{
HttpClientHandler httpClientHandler = new();
if (tlsProperties.IsTlsEnabled)
@@ -99,6 +99,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Hive2
return chain2.Build(certificate);
};
}
+ proxyConfigurator.ConfigureProxy(httpClientHandler);
return httpClientHandler;
}
diff --git a/csharp/src/Drivers/Apache/Hive2/README.md
b/csharp/src/Drivers/Apache/Hive2/README.md
index 76040d217..d043bae31 100644
--- a/csharp/src/Drivers/Apache/Hive2/README.md
+++ b/csharp/src/Drivers/Apache/Hive2/README.md
@@ -53,6 +53,13 @@ but can also be passed in the call to `AdbcDatabase.Connect`.
| `adbc.http_options.tls.allow_self_signed` | If self signed tls/ssl
certificate needs to be allowed or not. One of `True`, `False` | `False` |
| `adbc.http_options.tls.allow_hostname_mismatch` | If hostname mismatch is
allowed for ssl. One of `True`, `False` | `False` |
| `adbc.http_options.tls.trusted_certificate_path` | The full path of the
tls/ssl certificate .pem file containing custom CA certificates for verifying
the server when connecting over TLS | `` |
+| `adbc.proxy_options.use_proxy` | Whether to use a proxy for HTTP
connections. Only feature-complete in Spark driver. One of `True`, `False` |
`False` |
+| `adbc.proxy_options.proxy_host` | Hostname or IP address of the proxy
server. Only feature-complete in Spark driver. Required when use_proxy is True
| |
+| `adbc.proxy_options.proxy_port` | Port number of the proxy server. Only
feature-complete in Spark driver. Required when use_proxy is True | |
+| `adbc.proxy_options.proxy_ignore_list` | Comma-separated list of hosts or
domains that should bypass the proxy. Only feature-complete in Spark driver.
For example: "localhost,127.0.0.1,.internal.domain.com". Supports wildcard
patterns like "*.internal.domain.com" | |
+| `adbc.proxy_options.proxy_auth` | Whether to enable proxy authentication.
Only feature-complete in Spark driver. One of `True`, `False` | `False` |
+| `adbc.proxy_options.proxy_uid` | Username for proxy authentication. Only
feature-complete in Spark driver. Required when proxy_auth is True | |
+| `adbc.proxy_options.proxy_pwd` | Password for proxy authentication. Only
feature-complete in Spark driver. Required when proxy_auth is True | |
## Timeout Configuration
diff --git a/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs
b/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs
index ebc92e663..eac6170ff 100644
--- a/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Impala/ImpalaHttpConnection.cs
@@ -38,8 +38,11 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
{
private const string BasicAuthenticationScheme = "Basic";
+ private readonly HiveServer2ProxyConfigurator _proxyConfigurator;
+
public ImpalaHttpConnection(IReadOnlyDictionary<string, string>
properties) : base(properties)
{
+ _proxyConfigurator =
HiveServer2ProxyConfigurator.FromProperties(properties);
}
protected override void ValidateAuthentication()
@@ -142,7 +145,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Impala
Uri baseAddress = GetBaseAddress(uri, hostName, path, port,
ImpalaParameters.HostName, TlsOptions.IsTlsEnabled);
AuthenticationHeaderValue? authenticationHeaderValue =
GetAuthenticationHeaderValue(authTypeValue, username, password);
- HttpClientHandler httpClientHandler =
HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions);
+ HttpClientHandler httpClientHandler =
HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions, _proxyConfigurator);
HttpClient httpClient = new(httpClientHandler);
httpClient.BaseAddress = baseAddress;
httpClient.DefaultRequestHeaders.Authorization =
authenticationHeaderValue;
diff --git a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
index 627f5fb49..5ebfb64ca 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
@@ -39,8 +39,11 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
private const string BasicAuthenticationScheme = "Basic";
private const string BearerAuthenticationScheme = "Bearer";
+ protected readonly HiveServer2ProxyConfigurator _proxyConfigurator;
+
public SparkHttpConnection(IReadOnlyDictionary<string, string>
properties) : base(properties)
{
+ _proxyConfigurator =
HiveServer2ProxyConfigurator.FromProperties(properties);
}
protected override void ValidateAuthentication()
@@ -148,7 +151,7 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
protected virtual HttpMessageHandler CreateHttpHandler()
{
- return HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions);
+ return HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions,
_proxyConfigurator);
}
protected override TTransport CreateTransport()
diff --git
a/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
b/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
index c456c6b71..a54d6588c 100644
--- a/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
+++ b/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
@@ -61,6 +61,7 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
/// <summary>
/// Initializes a new instance of the <see
cref="OAuthClientCredentialsService"/> class.
/// </summary>
+ /// <param name="httpClient">The HTTP client to use for
requests.</param>
/// <param name="clientId">The OAuth client ID.</param>
/// <param name="clientSecret">The OAuth client secret.</param>
/// <param name="host">The base host of the Databricks
workspace.</param>
@@ -68,6 +69,7 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
/// <param name="timeoutMinutes">The timeout in minutes for HTTP
requests.</param>
/// <param name="refreshBufferMinutes">The number of minutes before
token expiration to refresh the token.</param>
public OAuthClientCredentialsProvider(
+ HttpClient httpClient,
string clientId,
string clientSecret,
string host,
@@ -83,7 +85,7 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
_scope = scope ?? throw new ArgumentNullException(nameof(scope));
_tokenEndpoint = DetermineTokenEndpoint();
- _httpClient = new HttpClient();
+ _httpClient = httpClient;
_httpClient.Timeout = TimeSpan.FromMinutes(_timeoutMinutes);
}
diff --git
a/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchDownloadManager.cs
b/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchDownloadManager.cs
index 7eec4d768..bed456b32 100644
--- a/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchDownloadManager.cs
+++ b/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchDownloadManager.cs
@@ -59,7 +59,7 @@ namespace
Apache.Arrow.Adbc.Drivers.Apache.Databricks.CloudFetch
/// <param name="statement">The HiveServer2 statement.</param>
/// <param name="schema">The Arrow schema.</param>
/// <param name="isLz4Compressed">Whether the results are LZ4
compressed.</param>
- public CloudFetchDownloadManager(DatabricksStatement statement, Schema
schema, bool isLz4Compressed)
+ public CloudFetchDownloadManager(DatabricksStatement statement, Schema
schema, bool isLz4Compressed, HttpClient httpClient)
{
_statement = statement ?? throw new
ArgumentNullException(nameof(statement));
_schema = schema ?? throw new
ArgumentNullException(nameof(schema));
@@ -159,11 +159,8 @@ namespace
Apache.Arrow.Adbc.Drivers.Apache.Databricks.CloudFetch
_downloadQueue = new BlockingCollection<IDownloadResult>(new
ConcurrentQueue<IDownloadResult>(), prefetchCount * 2);
_resultQueue = new BlockingCollection<IDownloadResult>(new
ConcurrentQueue<IDownloadResult>(), prefetchCount * 2);
- // Initialize the HTTP client
- _httpClient = new HttpClient
- {
- Timeout = TimeSpan.FromMinutes(timeoutMinutes)
- };
+ _httpClient = httpClient;
+ _httpClient.Timeout = TimeSpan.FromMinutes(timeoutMinutes);
// Initialize the result fetcher
_resultFetcher = new CloudFetchResultFetcher(
diff --git a/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchReader.cs
b/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchReader.cs
index 6a8d3d24b..ba160de68 100644
--- a/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchReader.cs
+++ b/csharp/src/Drivers/Databricks/CloudFetch/CloudFetchReader.cs
@@ -48,7 +48,7 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.CloudFetch
/// <param name="statement">The Databricks statement.</param>
/// <param name="schema">The Arrow schema.</param>
/// <param name="isLz4Compressed">Whether the results are LZ4
compressed.</param>
- public CloudFetchReader(DatabricksStatement statement, Schema schema,
bool isLz4Compressed)
+ public CloudFetchReader(DatabricksStatement statement, Schema schema,
bool isLz4Compressed, HttpClient httpClient)
{
this.schema = schema;
this.isLz4Compressed = isLz4Compressed;
@@ -71,14 +71,14 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.CloudFetch
// Initialize the download manager
if (isPrefetchEnabled)
{
- downloadManager = new CloudFetchDownloadManager(statement,
schema, isLz4Compressed);
+ downloadManager = new CloudFetchDownloadManager(statement,
schema, isLz4Compressed, httpClient);
downloadManager.StartAsync().Wait();
}
else
{
// For now, we only support the prefetch implementation
// This flag is reserved for future use if we need to support
a non-prefetch mode
- downloadManager = new CloudFetchDownloadManager(statement,
schema, isLz4Compressed);
+ downloadManager = new CloudFetchDownloadManager(statement,
schema, isLz4Compressed, httpClient);
downloadManager.StartAsync().Wait();
}
}
diff --git a/csharp/src/Drivers/Databricks/DatabricksConnection.cs
b/csharp/src/Drivers/Databricks/DatabricksConnection.cs
index fa62a0c29..99f477c6e 100644
--- a/csharp/src/Drivers/Databricks/DatabricksConnection.cs
+++ b/csharp/src/Drivers/Databricks/DatabricksConnection.cs
@@ -24,6 +24,7 @@ using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Apache.Arrow.Adbc.Drivers.Apache;
+using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
using Apache.Arrow.Adbc.Drivers.Apache.Spark;
using Apache.Arrow.Adbc.Drivers.Databricks.Auth;
using Apache.Arrow.Adbc.Drivers.Databricks.CloudFetch;
@@ -196,7 +197,10 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks
Properties.TryGetValue(DatabricksParameters.OAuthClientSecret,
out string? clientSecret);
Properties.TryGetValue(DatabricksParameters.OAuthScope, out
string? scope);
+ HttpClient OauthHttpClient = new
HttpClient(HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions,
_proxyConfigurator));
+
var tokenProvider = new OAuthClientCredentialsProvider(
+ OauthHttpClient,
clientId!,
clientSecret!,
host!,
@@ -255,7 +259,8 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks
// Choose the appropriate reader based on the result format
if (resultFormat == TSparkRowSetType.URL_BASED_SET)
{
- return new CloudFetchReader(databricksStatement, schema,
isLz4Compressed);
+ HttpClient cloudFetchHttpClient = new
HttpClient(HiveServer2TlsImpl.NewHttpClientHandler(TlsOptions,
_proxyConfigurator));
+ return new CloudFetchReader(databricksStatement, schema,
isLz4Compressed, cloudFetchHttpClient);
}
else
{
diff --git a/csharp/test/Drivers/Apache/ApacheTestConfiguration.cs
b/csharp/test/Drivers/Apache/ApacheTestConfiguration.cs
index 8c99c6628..91366384f 100644
--- a/csharp/test/Drivers/Apache/ApacheTestConfiguration.cs
+++ b/csharp/test/Drivers/Apache/ApacheTestConfiguration.cs
@@ -68,6 +68,33 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache
{
[JsonPropertyName("tls"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
public TlsTestConfiguration? Tls { get; set; }
+
+ [JsonPropertyName("proxy"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public ProxyTestConfiguration? Proxy { get; set; }
+ }
+
+ public class ProxyTestConfiguration
+ {
+ [JsonPropertyName("use_proxy"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public string? UseProxy { get; set; }
+
+ [JsonPropertyName("proxy_host"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public string? ProxyHost { get; set; }
+
+ [JsonPropertyName("proxy_port"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public int? ProxyPort { get; set; }
+
+ [JsonPropertyName("proxy_auth"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public string? ProxyAuth { get; set; }
+
+ [JsonPropertyName("proxy_uid"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public string? ProxyUid { get; set; }
+
+ [JsonPropertyName("proxy_pwd"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public string? ProxyPwd { get; set; }
+
+ [JsonPropertyName("proxy_ignore_list"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public string? ProxyIgnoreList { get; set; }
}
public class TlsTestConfiguration
diff --git a/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
b/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
index c7bdcc215..473e07265 100644
--- a/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Hive2/HiveServer2TestEnvironment.cs
@@ -132,6 +132,40 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Hive2
parameters.Add(HttpTlsOptions.TrustedCertificatePath,
tlsOptions.TrustedCertificatePath!);
}
}
+
+ // Add proxy configuration if provided
+ if (testConfiguration.HttpOptions.Proxy != null)
+ {
+ ProxyTestConfiguration proxyOptions =
testConfiguration.HttpOptions.Proxy;
+ if (!string.IsNullOrEmpty(proxyOptions.UseProxy))
+ {
+ parameters.Add(HttpProxyOptions.UseProxy,
proxyOptions.UseProxy!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyHost))
+ {
+ parameters.Add(HttpProxyOptions.ProxyHost,
proxyOptions.ProxyHost!);
+ }
+ if (proxyOptions.ProxyPort.HasValue)
+ {
+ parameters.Add(HttpProxyOptions.ProxyPort,
proxyOptions.ProxyPort.Value.ToString());
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyAuth))
+ {
+ parameters.Add(HttpProxyOptions.ProxyAuth,
proxyOptions.ProxyAuth!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyUid))
+ {
+ parameters.Add(HttpProxyOptions.ProxyUID,
proxyOptions.ProxyUid!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyPwd))
+ {
+ parameters.Add(HttpProxyOptions.ProxyPWD,
proxyOptions.ProxyPwd!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyIgnoreList))
+ {
+ parameters.Add(HttpProxyOptions.ProxyIgnoreList,
proxyOptions.ProxyIgnoreList!);
+ }
+ }
}
return parameters;
diff --git a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
index a512a3ca5..d05025469 100644
--- a/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
+++ b/csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs
@@ -141,6 +141,40 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
parameters.Add(HttpTlsOptions.TrustedCertificatePath,
tlsOptions.TrustedCertificatePath!);
}
}
+
+ // Add proxy configuration if provided
+ if (testConfiguration.HttpOptions.Proxy != null)
+ {
+ ProxyTestConfiguration proxyOptions =
testConfiguration.HttpOptions.Proxy;
+ if (!string.IsNullOrEmpty(proxyOptions.UseProxy))
+ {
+ parameters.Add(HttpProxyOptions.UseProxy,
proxyOptions.UseProxy!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyHost))
+ {
+ parameters.Add(HttpProxyOptions.ProxyHost,
proxyOptions.ProxyHost!);
+ }
+ if (proxyOptions.ProxyPort.HasValue)
+ {
+ parameters.Add(HttpProxyOptions.ProxyPort,
proxyOptions.ProxyPort.Value.ToString());
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyAuth))
+ {
+ parameters.Add(HttpProxyOptions.ProxyAuth,
proxyOptions.ProxyAuth!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyUid))
+ {
+ parameters.Add(HttpProxyOptions.ProxyUID,
proxyOptions.ProxyUid!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyPwd))
+ {
+ parameters.Add(HttpProxyOptions.ProxyPWD,
proxyOptions.ProxyPwd!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyIgnoreList))
+ {
+ parameters.Add(HttpProxyOptions.ProxyIgnoreList,
proxyOptions.ProxyIgnoreList!);
+ }
+ }
}
return parameters;
diff --git
a/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
b/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
index ec0fbb817..364a5490f 100644
--- a/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
+++ b/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
@@ -14,14 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
+using Apache.Arrow.Adbc.Drivers.Databricks.Auth;
using System;
using System.Threading;
using System.Threading.Tasks;
-using Apache.Arrow.Adbc.Drivers.Databricks.Auth;
+using System.Net.Http;
using Xunit;
using Xunit.Abstractions;
-using Apache.Arrow.Adbc.Tests.Drivers.Databricks;
namespace Apache.Arrow.Adbc.Tests.Drivers.Databricks.Auth
{
@@ -56,12 +55,13 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Databricks.Auth
}
return new OAuthClientCredentialsProvider(
+ new HttpClient(),
TestConfiguration.OAuthClientId,
TestConfiguration.OAuthClientSecret,
host,
+ scope,
timeoutMinutes: 1,
- refreshBufferMinutes: refreshBufferMinutes,
- scope: scope);
+ refreshBufferMinutes: refreshBufferMinutes);
}
[SkippableFact]
diff --git a/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
b/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
index d44f19645..89f87a9f8 100644
--- a/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
+++ b/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
@@ -159,6 +159,40 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Databricks
parameters.Add(HttpTlsOptions.TrustedCertificatePath,
tlsOptions.TrustedCertificatePath!);
}
}
+
+ // Add proxy configuration if provided
+ if (testConfiguration.HttpOptions.Proxy != null)
+ {
+ ProxyTestConfiguration proxyOptions =
testConfiguration.HttpOptions.Proxy;
+ if (!string.IsNullOrEmpty(proxyOptions.UseProxy))
+ {
+ parameters.Add(HttpProxyOptions.UseProxy,
proxyOptions.UseProxy!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyHost))
+ {
+ parameters.Add(HttpProxyOptions.ProxyHost,
proxyOptions.ProxyHost!);
+ }
+ if (proxyOptions.ProxyPort.HasValue)
+ {
+ parameters.Add(HttpProxyOptions.ProxyPort,
proxyOptions.ProxyPort.Value.ToString());
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyAuth))
+ {
+ parameters.Add(HttpProxyOptions.ProxyAuth,
proxyOptions.ProxyAuth!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyUid))
+ {
+ parameters.Add(HttpProxyOptions.ProxyUID,
proxyOptions.ProxyUid!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyPwd))
+ {
+ parameters.Add(HttpProxyOptions.ProxyPWD,
proxyOptions.ProxyPwd!);
+ }
+ if (!string.IsNullOrEmpty(proxyOptions.ProxyIgnoreList))
+ {
+ parameters.Add(HttpProxyOptions.ProxyIgnoreList,
proxyOptions.ProxyIgnoreList!);
+ }
+ }
}
return parameters;