
Commit 44f6853

Add validation for cloud fetch options

1 parent 6df9771 · commit 44f6853

2 files changed: +41, -8 lines changed

csharp/src/Drivers/Databricks/DatabricksConnection.cs

+36, -8

@@ -37,23 +37,51 @@ internal class DatabricksConnection : SparkHttpConnection
         private long _maxBytesPerFile = DefaultMaxBytesPerFile;
 
         public DatabricksConnection(IReadOnlyDictionary<string, string> properties) : base(properties)
+        {
+            ValidateProperties();
+        }
+
+        private void ValidateProperties()
         {
             // Parse CloudFetch options from connection properties
-            if (Properties.TryGetValue(DatabricksParameters.UseCloudFetch, out string? useCloudFetchStr) &&
-                bool.TryParse(useCloudFetchStr, out bool useCloudFetchValue))
+            if (Properties.TryGetValue(DatabricksParameters.UseCloudFetch, out string? useCloudFetchStr))
             {
-                _useCloudFetch = useCloudFetchValue;
+                if (bool.TryParse(useCloudFetchStr, out bool useCloudFetchValue))
+                {
+                    _useCloudFetch = useCloudFetchValue;
+                }
+                else
+                {
+                    throw new ArgumentException($"Parameter '{DatabricksParameters.UseCloudFetch}' value '{useCloudFetchStr}' could not be parsed. Valid values are 'true' and 'false'.");
+                }
             }
 
-            if (Properties.TryGetValue(DatabricksParameters.CanDecompressLz4, out string? canDecompressLz4Str) &&
-                bool.TryParse(canDecompressLz4Str, out bool canDecompressLz4Value))
+            if (Properties.TryGetValue(DatabricksParameters.CanDecompressLz4, out string? canDecompressLz4Str))
             {
-                _canDecompressLz4 = canDecompressLz4Value;
+                if (bool.TryParse(canDecompressLz4Str, out bool canDecompressLz4Value))
+                {
+                    _canDecompressLz4 = canDecompressLz4Value;
+                }
+                else
+                {
+                    throw new ArgumentException($"Parameter '{DatabricksParameters.CanDecompressLz4}' value '{canDecompressLz4Str}' could not be parsed. Valid values are 'true' and 'false'.");
+                }
             }
 
-            if (Properties.TryGetValue(DatabricksParameters.MaxBytesPerFile, out string? maxBytesPerFileStr) &&
-                long.TryParse(maxBytesPerFileStr, out long maxBytesPerFileValue))
+            if (Properties.TryGetValue(DatabricksParameters.MaxBytesPerFile, out string? maxBytesPerFileStr))
             {
+                if (!long.TryParse(maxBytesPerFileStr, out long maxBytesPerFileValue))
+                {
+                    throw new ArgumentException($"Parameter '{DatabricksParameters.MaxBytesPerFile}' value '{maxBytesPerFileStr}' could not be parsed. Valid values are positive integers.");
+                }
+
+                if (maxBytesPerFileValue <= 0)
+                {
+                    throw new ArgumentOutOfRangeException(
+                        nameof(Properties),
+                        maxBytesPerFileValue,
+                        $"Parameter '{DatabricksParameters.MaxBytesPerFile}' value must be a positive integer.");
+                }
                 _maxBytesPerFile = maxBytesPerFileValue;
             }
         }
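
With this change, a malformed CloudFetch option fails fast when the connection object is constructed instead of silently falling back to its default. A minimal sketch of how that surfaces, written as if from code with access to the driver's internals (DatabricksConnection is declared internal, so applications normally reach it through the driver's public entry points); the host and token values are placeholders:

    using System;
    using System.Collections.Generic;
    using Apache.Arrow.Adbc.Drivers.Apache.Spark;
    using Apache.Arrow.Adbc.Drivers.Databricks;

    var properties = new Dictionary<string, string>
    {
        [SparkParameters.HostName] = "valid.server.com",    // placeholder
        [SparkParameters.Token] = "abcdef",                 // placeholder
        [DatabricksParameters.UseCloudFetch] = "notabool",  // not a valid boolean
    };

    try
    {
        // ValidateProperties() runs in the constructor and rejects the bad value.
        var connection = new DatabricksConnection(properties);
    }
    catch (ArgumentException ex)
    {
        // e.g. "Parameter '...' value 'notabool' could not be parsed. Valid values are 'true' and 'false'."
        Console.WriteLine(ex.Message);
    }

A MaxBytesPerFile value that parses but is not positive (for example "-100") throws ArgumentOutOfRangeException instead; since that type derives from ArgumentException, the catch block above would still observe it.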

csharp/test/Drivers/Databricks/DatabricksConnectionTest.cs

+5, -0

@@ -22,6 +22,7 @@
 using Apache.Arrow.Adbc.Drivers.Apache;
 using Apache.Arrow.Adbc.Drivers.Apache.Hive2;
 using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Apache.Arrow.Adbc.Drivers.Databricks;
 using Thrift.Transport;
 using Xunit;
 using Xunit.Abstractions;
@@ -295,6 +296,10 @@ public InvalidConnectionParametersTestData()
             Add(new([], typeof(ArgumentException)));
             Add(new(new() { [SparkParameters.Type] = " " }, typeof(ArgumentException)));
             Add(new(new() { [SparkParameters.Type] = "xxx" }, typeof(ArgumentException)));
+            Add(new(new() { [SparkParameters.HostName] = "valid.server.com", [SparkParameters.Token] = "abcdef", [DatabricksParameters.UseCloudFetch] = "notabool" }, typeof(ArgumentException)));
+            Add(new(new() { [SparkParameters.HostName] = "valid.server.com", [SparkParameters.Token] = "abcdef", [DatabricksParameters.CanDecompressLz4] = "notabool" }, typeof(ArgumentException)));
+            Add(new(new() { [SparkParameters.HostName] = "valid.server.com", [SparkParameters.Token] = "abcdef", [DatabricksParameters.MaxBytesPerFile] = "notanumber" }, typeof(ArgumentException)));
+            Add(new(new() { [SparkParameters.HostName] = "valid.server.com", [SparkParameters.Token] = "abcdef", [DatabricksParameters.MaxBytesPerFile] = "-100" }, typeof(ArgumentOutOfRangeException)));
             Add(new(new() { /*[SparkParameters.Type] = SparkServerTypeConstants.Databricks,*/ [SparkParameters.HostName] = "valid.server.com", [SparkParameters.Token] = "abcdef", [SparkParameters.Port] = "-1" }, typeof(ArgumentOutOfRangeException)));
             Add(new(new() { /*[SparkParameters.Type] = SparkServerTypeConstants.Databricks,*/ [SparkParameters.HostName] = "valid.server.com", [SparkParameters.Token] = "abcdef", [SparkParameters.Port] = IPEndPoint.MinPort.ToString(CultureInfo.InvariantCulture) }, typeof(ArgumentOutOfRangeException)));
             Add(new(new() { /*[SparkParameters.Type] = SparkServerTypeConstants.Databricks,*/ [SparkParameters.HostName] = "valid.server.com", [SparkParameters.Token] = "abcdef", [SparkParameters.Port] = (IPEndPoint.MaxPort + 1).ToString(CultureInfo.InvariantCulture) }, typeof(ArgumentOutOfRangeException)));
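
The theory that consumes InvalidConnectionParametersTestData is not part of this diff. As an illustration only, a standalone xUnit test in the same spirit might look like the sketch below; the class, method, and data-source names are hypothetical, and it assumes internals access to DatabricksConnection as in the driver's own test assembly:

    using System;
    using System.Collections.Generic;
    using Apache.Arrow.Adbc.Drivers.Apache.Spark;
    using Apache.Arrow.Adbc.Drivers.Databricks;
    using Xunit;

    public class CloudFetchOptionValidationExample
    {
        public static IEnumerable<object[]> InvalidCloudFetchParameters()
        {
            // Mirrors the rows added above: unparsable boolean, unparsable number, non-positive size.
            yield return new object[] { DatabricksParameters.UseCloudFetch, "notabool", typeof(ArgumentException) };
            yield return new object[] { DatabricksParameters.MaxBytesPerFile, "notanumber", typeof(ArgumentException) };
            yield return new object[] { DatabricksParameters.MaxBytesPerFile, "-100", typeof(ArgumentOutOfRangeException) };
        }

        [Theory]
        [MemberData(nameof(InvalidCloudFetchParameters))]
        public void InvalidCloudFetchOptionThrows(string key, string value, Type expectedException)
        {
            var properties = new Dictionary<string, string>
            {
                [SparkParameters.HostName] = "valid.server.com", // placeholder
                [SparkParameters.Token] = "abcdef",              // placeholder
                [key] = value
            };

            // Construction triggers ValidateProperties(), so the exception is thrown here.
            Assert.Throws(expectedException, () => { _ = new DatabricksConnection(properties); });
        }
    }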
