Skip to content

Commit ed16c0b

Browse files
Use testcontainers for the dbsync Postgres test
This runs the test in a throwaway Postgres Docker container rather than requiring the tests to run against an actual, persistent Postgres server.
1 parent 2a3c897 commit ed16c0b

File tree

3 files changed

+45
-11
lines changed

3 files changed

+45
-11
lines changed

requirements-dev.txt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,4 +7,5 @@ pytest-mock==3.14.0
77
pytest==8.3.3
88
requests-mock==1.12.1
99
ruff==0.6.9
10-
testfixtures==8.3.0
10+
testfixtures==8.3.0
11+
testcontainers==4.9.0

test/test_databases/conftest.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
import pytest
2+
import os
3+
from testcontainers.postgres import PostgresContainer
4+
5+
6+
@pytest.fixture(scope="class")
7+
def postgres_container():
8+
"""Initialize a Postgres container
9+
10+
Sets environment variables so that parsons.Postgres(port=None)
11+
connects to this container automatically.
12+
"""
13+
with PostgresContainer("postgres:9.5") as postgres:
14+
os.environ["PGUSER"] = "test"
15+
os.environ["PGPASSWORD"] = "test"
16+
os.environ["PGHOST"] = "localhost"
17+
os.environ["PGDATABASE"] = "test"
18+
os.environ["PGPORT"] = postgres.get_exposed_port(5432)
19+
yield

test/test_databases/test_dbsync.py

Lines changed: 24 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
from parsons import Postgres, DBSync, Table, Redshift
2+
import pytest
23
from parsons.databases.database_connector import DatabaseConnector
34
from abc import ABC
45
from test.test_databases.fakes import FakeDatabase
6+
57
from test.utils import assert_matching_tables
68
from typing import Type
79
import unittest
@@ -41,7 +43,9 @@ def setUp(self):
4143
f"{self.temp_schema}.source_table" if self.temp_schema else "source_table"
4244
)
4345
self.destination_table = (
44-
f"{self.temp_schema}.destination_table" if self.temp_schema else "destination_table"
46+
f"{self.temp_schema}.destination_table"
47+
if self.temp_schema
48+
else "destination_table"
4549
)
4650

4751
# Create source table
@@ -64,7 +68,9 @@ def tearDown(self):
6468

6569
def assert_matching_tables(self) -> None:
6670
source = self.source_db.query(f"SELECT * FROM {self.source_table}")
67-
destination = self.destination_db.query(f"SELECT * FROM {self.destination_table}")
71+
destination = self.destination_db.query(
72+
f"SELECT * FROM {self.destination_table}"
73+
)
6874
assert_matching_tables(source, destination)
6975

7076
def table_sync_full(self, if_exists: str, **kwargs):
@@ -100,15 +106,19 @@ def test_table_sync_full_empty_table(self):
100106
def test_table_sync_full_chunk(self):
101107
# Test chunking in full sync.
102108
self.db_sync.chunk_size = 10
103-
self.db_sync.table_sync_full(self.source_table, self.destination_table, if_exists="drop")
109+
self.db_sync.table_sync_full(
110+
self.source_table, self.destination_table, if_exists="drop"
111+
)
104112
self.assert_matching_tables()
105113

106114
def test_table_sync_incremental(self):
107115
# Test that incremental sync
108116

109117
self.destination_db.copy(self.table1, self.destination_table)
110118
self.source_db.copy(self.table2, self.source_table, if_exists="append")
111-
self.db_sync.table_sync_incremental(self.source_table, self.destination_table, "pk")
119+
self.db_sync.table_sync_incremental(
120+
self.source_table, self.destination_table, "pk"
121+
)
112122
self.assert_matching_tables()
113123

114124
def test_table_sync_incremental_chunk(self):
@@ -117,13 +127,17 @@ def test_table_sync_incremental_chunk(self):
117127
self.db_sync.chunk_size = 10
118128
self.destination_db.copy(self.table1, self.destination_table)
119129
self.source_db.copy(self.table2, self.source_table, if_exists="append")
120-
self.db_sync.table_sync_incremental(self.source_table, self.destination_table, "pk")
130+
self.db_sync.table_sync_incremental(
131+
self.source_table, self.destination_table, "pk"
132+
)
121133

122134
self.assert_matching_tables()
123135

124136
def test_table_sync_incremental_create_destination_table(self):
125137
# Test that an incremental sync works if the destination table does not exist.
126-
self.db_sync.table_sync_incremental(self.source_table, self.destination_table, "pk")
138+
self.db_sync.table_sync_incremental(
139+
self.source_table, self.destination_table, "pk"
140+
)
127141
self.assert_matching_tables()
128142

129143
def test_table_sync_incremental_empty_table(self):
@@ -182,9 +196,7 @@ def test_table_sync_full_write_chunk(self):
182196
)
183197

184198

185-
# These tests interact directly with the Postgres database. In order to run, set the
186-
# env to LIVE_TEST='TRUE'.
187-
@unittest.skipIf(not os.environ.get("LIVE_TEST"), "Skipping because not running live test")
199+
@pytest.mark.usefixtures("postgres_container")
188200
class TestPostgresDBSync(TestDBSync):
189201
db = Postgres
190202
setup_sql = f"""
@@ -198,6 +210,8 @@ class TestPostgresDBSync(TestDBSync):
198210

199211
# These tests interact directly with the Redshift database. In order to run, set the
200212
# env to LIVE_TEST='TRUE'.
201-
@unittest.skipIf(not os.environ.get("LIVE_TEST"), "Skipping because not running live test")
213+
@unittest.skipIf(
214+
not os.environ.get("LIVE_TEST"), "Skipping because not running live test"
215+
)
202216
class TestRedshiftDBSync(TestPostgresDBSync):
203217
db = Redshift

0 commit comments

Comments
 (0)