29 changes: 11 additions & 18 deletions tests/resources/db2_test_tables.sql
@@ -95,6 +95,8 @@ CREATE TABLE IF NOT EXISTS pso_data_validator.dvt_null_not_null
);
COMMENT ON TABLE pso_data_validator.dvt_null_not_null IS 'Nullable integration test table, DB2 is assumed to be a DVT source (not target).';

+-- In Db2 the maximum precision for a DECIMAL data type is 31 digits.
+-- This renders some columns in this table incompatible with the equivalent table in other systems.
CREATE TABLE IF NOT EXISTS pso_data_validator.dvt_large_decimals
( id DECIMAL(31) NOT NULL PRIMARY KEY
, col_data VARCHAR(10)
@@ -108,41 +110,32 @@ CREATE TABLE IF NOT EXISTS pso_data_validator.dvt_large_decimals
);
COMMENT ON TABLE pso_data_validator.dvt_large_decimals IS 'Large decimals integration test table';
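Db2 caps DECIMAL precision at 31 digits, which is why the comment above flags some columns as incompatible with other engines. As a quick illustration (a hypothetical helper, not part of this PR), the limit can be checked from Python with the standard decimal module:

```python
# Hypothetical helper, not part of this PR: check whether a numeric literal
# fits within Db2's 31-digit DECIMAL precision limit.
from decimal import Decimal

DB2_MAX_DECIMAL_PRECISION = 31

def fits_db2_decimal(literal: str) -> bool:
    # Precision is the total number of significant digits in the value.
    return len(Decimal(literal).as_tuple().digits) <= DB2_MAX_DECIMAL_PRECISION

assert fits_db2_decimal("1234567890123456789012345678901")             # 31 digits
assert not fits_db2_decimal("12345678901234567890123456789012345678")  # 38 digits
```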

-/* INSERT INTO pso_data_validator.dvt_large_decimals VALUES
+INSERT INTO pso_data_validator.dvt_large_decimals VALUES
(123456789012345678901234567890,'Row 1'
,987654321012345678
,1234567890123456789012345678901
,12345678901234567890123456789.123456
,12345678.123456789012345678901234567890
,NULL,NULL,NULL
,987654321012345678,12345678901234567.1);
INSERT INTO pso_data_validator.dvt_large_decimals VALUES
(223456789012345678901234567890,'Row 2'
,987654321012345678
,1234567890123456789012345678901
,12345678901234567890123456789.123456789
,12345678.123456789012345678901234567890
,NULL,NULL,NULL
,987654321012345678,12345678901234567.1);
INSERT INTO pso_data_validator.dvt_large_decimals VALUES
(323456789012345678901234567890,'Row 3'
,987654321012345678
,1234567890123456789012345678901
,12345678901234567890123456789.123456789
,12345678.123456789012345678901234567890
,NULL,NULL,NULL
,987654321012345678,12345678901234567.1);
INSERT INTO pso_data_validator.dvt_large_decimals VALUES
(423456789012345678901234567890,'Row 4'
,987654321012345678
,1234567890123456789012345678901
,12345678901234567890123456789.123456789
,12345678.123456789012345678901234567890
-,987654321012345678,12345678901234567.1);INSERT INTO db2inst1.dvt_large_decimals VALUES
+,NULL,NULL,NULL
+,987654321012345678,12345678901234567.1);
INSERT INTO pso_data_validator.dvt_large_decimals VALUES
(523456789012345678901234567890,'Row 5'
,987654321012345678
,1234567890123456789012345678901
,12345678901234567890123456789.123456789
,12345678.123456789012345678901234567890
,NULL,NULL,NULL
,987654321012345678,12345678901234567.1);
-COMMIT; */
+COMMIT;

CREATE TABLE IF NOT EXISTS pso_data_validator.dvt_binary
( binary_id VARBINARY(16) NOT NULL PRIMARY KEY
66 changes: 65 additions & 1 deletion tests/system/data_sources/test_db2.py
@@ -147,7 +147,7 @@ def test_column_validation_core_types():
    new=mock_get_connection_config,
)
def test_column_validation_core_types_to_bigquery():
"""DB2 to BigQuery dvt_core_types column validation"""
"""Db2 to BigQuery dvt_core_types column validation"""
    # Excluded col_float32 because BigQuery does not have an exactly matching type and float32/64 are lossy and cannot be compared.
    # Excluded col_tstz since it is not possible to set a time zone on this column in Db2.
    cols = "col_int8,col_int16,col_int32,col_int64,col_dec_20,col_dec_38,col_dec_10_2,col_float64,col_varchar_30,col_char_2,col_string,col_date,col_datetime"
@@ -162,6 +162,50 @@ def test_column_validation_core_types_to_bigquery():
    )

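The float32 exclusion in the comment above comes down to lossy round-trips; a small illustration (not part of this PR) using only the standard library:

```python
# Illustrative only, not part of this PR: a float64 value generally does not
# survive a round trip through float32, so byte-exact cross-engine comparison
# of float columns is not meaningful.
import struct

x = 12345678.123456789                             # Python floats are float64
x32 = struct.unpack("f", struct.pack("f", x))[0]   # narrow to float32 and back
print(x, x32, x == x32)                            # the values differ
```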

+@mock.patch(
+    "data_validation.state_manager.StateManager.get_connection_config",
+    new=mock_get_connection_config,
+)
+def test_column_validation_large_decimals_to_bigquery():
+    """Db2 to BigQuery dvt_large_decimals column validation.
+
+    Only includes decimal(18) columns due to Db2's maximum DECIMAL precision of 31 digits.
+    """
+    cols = "id,col_dec_18"
+    column_validation_test(
+        tables="pso_data_validator.dvt_large_decimals",
+        tc="bq-conn",
+        count_cols=cols,
+        min_cols=cols,
+        sum_cols=cols,
+        avg_cols=cols,
+        std_cols=cols,
+    )
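For context, a column validation compares per-column aggregates computed independently on each side. A rough sketch of the idea (not DVT's actual implementation; the helper names are made up):

```python
# Rough sketch, not DVT's implementation: compute the same aggregates on
# source and target, then compare them pairwise.
from decimal import Decimal

def aggregates(values: list) -> dict:
    n = len(values)
    total = sum(values, Decimal(0))
    avg = total / n
    variance = sum(((v - avg) ** 2 for v in values), Decimal(0)) / n
    return {"count": n, "min": min(values), "sum": total,
            "avg": avg, "std": variance.sqrt()}

def compare_sides(source: list, target: list) -> dict:
    src, tgt = aggregates(source), aggregates(target)
    return {name: src[name] == tgt[name] for name in src}
```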


+@mock.patch(
+    "data_validation.state_manager.StateManager.get_connection_config",
+    new=mock_get_connection_config,
+)
+def test_column_validation_large_decimals_to_bigquery_mismatch():
+    """Db2 to BigQuery dvt_large_decimals column validation on columns where we expect a mismatch.
+
+    Regression test for:
+    https://github.com/GoogleCloudPlatform/professional-services-data-validator/issues/1007
+    """
+    cols = "col_dec_18_fail,col_dec_18_1_fail"
+    df = column_validation_test(
+        tables="pso_data_validator.dvt_large_decimals",
+        tc="bq-conn",
+        count_cols=cols,
+        sum_cols=cols,
+        expected_rows=2,
+    )
+    # The columns below have mismatching data and should appear in the DataFrame.
+    assert "sum__col_dec_18_fail" in df[consts.VALIDATION_NAME].values
+    assert "sum__col_dec_18_1_fail" in df[consts.VALIDATION_NAME].values


@mock.patch(
"data_validation.state_manager.StateManager.get_connection_config",
new=mock_get_connection_config,
@@ -259,6 +303,26 @@ def test_row_validation_core_types_to_bigquery():
    )


+@mock.patch(
+    "data_validation.state_manager.StateManager.get_connection_config",
+    new=mock_get_connection_config,
+)
+def test_row_validation_large_decimals_to_bigquery():
+    """Db2 to BigQuery dvt_large_decimals row validation.
+
+    See https://github.com/GoogleCloudPlatform/professional-services-data-validator/issues/956
+    This tests large decimals for the primary key join column plus the hash columns.
+    Only includes decimal(18) columns due to Db2's maximum DECIMAL precision of 31 digits.
+    """
+    # Add id,col_data into the hash value below once issue-1634 has been fixed.
+    row_validation_test(
+        tables="pso_data_validator.dvt_large_decimals",
+        tc="bq-conn",
+        hash="col_dec_18",
+        use_randow_row=True,
+        random_row_batch_size=5,
+    )
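Row validation with a hash column compares a per-row fingerprint rather than aggregates. A simplified sketch of the idea (not DVT's exact canonicalisation rules):

```python
# Simplified sketch, not DVT's exact rules: build a canonical string from the
# compared columns, hash it per row, then match fingerprints between source
# and target on the primary key.
import hashlib

def row_fingerprint(*columns) -> str:
    canonical = "|".join("" if c is None else str(c) for c in columns)
    return hashlib.sha256(canonical.encode("utf-8")).hexdigest()

# Rows match only when every hashed column is identical on both sides.
assert row_fingerprint(1, "abc") == row_fingerprint(1, "abc")
assert row_fingerprint(1, "abc") != row_fingerprint(1, "abd")
```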


@mock.patch(
"data_validation.state_manager.StateManager.get_connection_config",
new=mock_get_connection_config,