1 | 2 | from unittest.mock import Mock |
| 3 | +from datetime import datetime, timezone, timedelta |
2 | 4 | import pytest |
3 | 5 | from pytest_mock import MockerFixture |
4 | 6 | from mptt.exceptions import InvalidMove |
5 | 7 | from uuid import uuid4 |
| 8 | +from io import StringIO |
6 | 9 |
7 | | -from country_workspace.contrib.hope.sync.base import LogLevel |
| 10 | +from country_workspace.contrib.hope.sync.base import BaseSync, SkipRecordError |
8 | 11 | from country_workspace.contrib.hope.sync.context_geo import SyncContextGeo, SyncStep, sync_context_geo |
| 12 | +from country_workspace.models import Country, AreaType, Area |
| 13 | + |
| 14 | +_today = datetime.now(timezone.utc).date() |
9 | 15 |
10 | 16 | COUNTRY = { |
| 17 | + "path": "lookups/country", |
| 18 | + "updated_at_after": "2025-05-05", |
11 | 19 | "results": [ |
12 | 20 | { |
13 | 21 | "id": str(uuid4()), |
14 | | - "name": "Ukraine", |
15 | | - "iso_code2": "UA", |
16 | | - "iso_code3": "UKR", |
| 22 | + "name": "Testland Example", |
| 23 | + "iso_code2": "TL", |
| 24 | + "iso_code3": "TLD", |
| 25 | + "short_name": "Testland", |
17 | 26 | }, |
18 | 27 | ], |
19 | 28 | } |
20 | 29 |
21 | | -AREATYPE = { |
| 30 | +AREA_TYPES = { |
| 31 | + "path": "areatypes", |
| 32 | + "updated_at_after": "2025-05-05", |
22 | 33 | "results": [ |
23 | 34 | { |
24 | 35 | "id": str(uuid4()), |
25 | | - "name": "UA52", |
26 | | - "country": "UA", |
27 | | - "area_level": 1, |
| 36 | + "name": "RegionExample", |
| 37 | + "country": "country-hope-id", |
| 38 | + "parent": "parent-hope-id", |
| 39 | + "area_level": 2, |
| 40 | + "valid_from": (_today - timedelta(days=30)).isoformat(), |
| 41 | + "valid_until": (_today + timedelta(days=30)).isoformat(), |
| 42 | + "extras": {"foo": "bar"}, |
28 | 43 | }, |
29 | 44 | ], |
30 | 45 | } |
31 | 46 |
32 | | -AREA = { |
| 47 | + |
| 48 | +AREAS = { |
| 49 | + "path": "areas", |
| 50 | + "updated_at_after": "2025-05-05", |
33 | 51 | "results": [ |
34 | 52 | { |
35 | 53 | "id": str(uuid4()), |
36 | | - "name": "Kyivska", |
37 | | - "p_code": "UA32", |
38 | | - "area_type": str(uuid4()), |
39 | | - } |
| 54 | + "name": "AreaExample", |
| 55 | + "area_type": "areatype-hope-id", |
| 56 | + "parent": "parent-area-id", |
| 57 | + "p_code": "P001", |
| 58 | + "valid_from": (_today - timedelta(days=30)).isoformat(), |
| 59 | + "valid_until": (_today + timedelta(days=30)).isoformat(), |
| 60 | + "extras": {"x": "y"}, |
| 61 | + }, |
40 | 62 | ], |
41 | 63 | } |
42 | 64 |
43 | 65 |
44 | | -def test_assign_parents_success(mock_model: Mock, mocker: MockerFixture) -> None: |
45 | | - child_instance = Mock() |
46 | | - parent_instance = Mock() |
47 | | - mock_model.objects.get.side_effect = [child_instance, parent_instance] |
48 | | - mock_model.objects.bulk_update = Mock() |
| 66 | +@pytest.fixture |
| 68 | +def sync_geo(base_sync: BaseSync) -> SyncContextGeo: |
| 68 | + return SyncContextGeo(client=base_sync.client, stdout=base_sync.stdout) |
| 69 | + |
| 70 | + |
| 71 | +def test_sync_countries(mocker: MockerFixture, sync_geo: SyncContextGeo) -> None: |
| 72 | + mock_sync_entity = mocker.patch.object(sync_geo, "sync_entity") |
| 73 | + mocker.patch.object(sync_geo, "get_updated_at_after", return_value=COUNTRY["updated_at_after"]) |
| 74 | + |
| 75 | + sync_geo.sync_countries() |
| 76 | + |
| 77 | + mock_sync_entity.assert_called_once() |
| 78 | + config = mock_sync_entity.call_args.args[0] |
| 79 | + assert config["model"] is Country |
| 80 | + ep = config["endpoint"] |
| 81 | + assert (ep["path"], ep.get("params")) == (COUNTRY["path"], {"updated_at_after": COUNTRY["updated_at_after"]}) |
| 82 | + |
| 83 | + expected_defaults = {k: COUNTRY["results"][0][k] for k in ("name", "iso_code2", "iso_code3")} |
| 84 | + defaults = config["prepare_defaults"](COUNTRY["results"][0]) |
| 85 | + assert defaults == expected_defaults |
49 | 86 |
50 | | - sync_context = SyncContextGeo(client=mocker.Mock(), stdout=mocker.Mock()) |
51 | 87 |
52 | | - parent_mapping = {"1": "2"} |
53 | | - sync_context._assign_parents(mock_model, parent_mapping) |
| 88 | +@pytest.mark.parametrize("expect_error", [False, True], ids=["Country-Exist", "Country-DoesNotExist"]) |
| 89 | +def test_sync_area_types(mocker: MockerFixture, sync_geo: SyncContextGeo, expect_error: bool) -> None: |
| 90 | + mocker.patch.object(sync_geo, "sync_countries") |
| 91 | + m_entity = mocker.patch.object(sync_geo, "sync_entity") |
| 92 | + mocker.patch.object(sync_geo, "get_updated_at_after", return_value=AREA_TYPES["updated_at_after"]) |
| 93 | + m_assign = mocker.patch.object(sync_geo, "_assign_parents") |
| 94 | + m_rebuild = mocker.patch.object(AreaType.objects, "rebuild") |
54 | 95 |
55 | | - assert child_instance.parent == parent_instance |
56 | | - mock_model.objects.bulk_update.assert_called_once_with([child_instance], fields=["parent"]) |
| 96 | + if expect_error: |
| 97 | +        mocker.patch.object(Country.objects, "get", side_effect=Country.DoesNotExist) |
| 98 | + else: |
| 99 | + mock_country = mocker.patch.object(Country.objects, "get", return_value=object()) |
| 100 | + |
| 101 | + sync_geo.sync_area_types() |
| 102 | + |
| 103 | + m_entity.assert_called_once() |
| 104 | + cfg = m_entity.call_args[0][0] |
| 105 | + rec = AREA_TYPES["results"][0] |
| 106 | + |
| 107 | + if expect_error: |
| 108 | + with pytest.raises(SkipRecordError, match="Country not found."): |
| 109 | + cfg["prepare_defaults"](rec) |
| 110 | + else: |
| 111 | + expected = { |
| 112 | + "country": mock_country.return_value, |
| 113 | + **{k: rec[k] for k in ("name", "area_level", "valid_from", "valid_until", "extras")}, |
| 114 | + } |
| 115 | + assert cfg["prepare_defaults"](rec) == expected |
| 116 | + |
| 117 | + m_assign.assert_called_once_with(AreaType, {rec["id"]: rec["parent"]}) |
| 118 | + m_rebuild.assert_called_once() |
| 119 | + |
| 120 | + |
| 121 | +@pytest.mark.parametrize("expect_error", [False, True], ids=["AreaType-Exist", "AreaType-DoesNotExist"]) |
| 122 | +def test_sync_areas(mocker: MockerFixture, sync_geo: SyncContextGeo, expect_error: bool) -> None: |
| 123 | + mocker.patch.object(sync_geo, "sync_area_types") |
| 124 | + m_entity = mocker.patch.object(sync_geo, "sync_entity") |
| 125 | + mocker.patch.object(sync_geo, "get_updated_at_after", return_value=AREAS["updated_at_after"]) |
| 126 | + m_assign = mocker.patch.object(sync_geo, "_assign_parents") |
| 127 | + m_rebuild = mocker.patch.object(Area.objects, "rebuild") |
| 128 | + |
| 129 | + if expect_error: |
| 130 | + mocker.patch.object(AreaType.objects, "get", side_effect=AreaType.DoesNotExist) |
| 131 | + else: |
| 132 | + mock_area_type = mocker.patch.object(AreaType.objects, "get", return_value=object()) |
| 133 | + |
| 134 | + sync_geo.sync_areas() |
| 135 | + |
| 136 | + m_entity.assert_called_once() |
| 137 | + cfg = m_entity.call_args[0][0] |
| 138 | + rec = AREAS["results"][0] |
| 139 | + |
| 140 | + if expect_error: |
| 141 | + with pytest.raises(SkipRecordError, match="AreaType not found."): |
| 142 | + cfg["prepare_defaults"](rec) |
| 143 | + else: |
| 144 | + expected = { |
| 145 | + "area_type": mock_area_type.return_value, |
| 146 | + **{k: rec[k] for k in ("name", "p_code", "valid_from", "valid_until", "extras")}, |
| 147 | + } |
| 148 | + assert cfg["prepare_defaults"](rec) == expected |
| 149 | + m_assign.assert_called_once_with(Area, {rec["id"]: rec["parent"]}) |
| 150 | + m_rebuild.assert_called_once() |
57 | 151 |
58 | 152 |
59 | 153 | @pytest.mark.parametrize( |
60 | | - ("get_side_effect", "error_message"), |
| 154 | + ("child_ok", "parent_ok", "bulk_exc", "expected_bulk_calls", "expected_logs"), |
61 | 155 | [ |
62 | | - ([Exception("DoesNotExist"), Mock()], "test_model: child '1' not found for parent assignment"), |
63 | | - ([Mock(), Exception("DoesNotExist")], "test_model parent '2' not found for assignment"), |
| 156 | + (True, True, False, 1, []), |
| 157 | + (False, True, False, 0, ["RECORD_SKIPPED"]), |
| 158 | + (True, False, False, 0, ["RECORD_SKIPPED"]), |
| 159 | + (True, True, True, 1, ["RECORD_SYNC_FAILURE"]), |
64 | 160 | ], |
65 | | - ids=["missing_child", "missing_parent"], |
| 161 | + ids=["success", "missing_child", "missing_parent", "invalid_move"], |
66 | 162 | ) |
67 | | -def test_assign_parents_missing(mock_model: Mock, mocker: MockerFixture, get_side_effect, error_message) -> None: |
68 | | - mock_model.DoesNotExist = Exception |
69 | | - mock_model.objects.get.side_effect = get_side_effect |
70 | | - mocker.patch.object(SyncContextGeo, "emit_log") |
71 | | - |
72 | | - sync_context = SyncContextGeo(client=mocker.Mock(), stdout=mocker.Mock()) |
73 | | - sync_context._assign_parents(mock_model, {"1": "2"}) |
74 | | - |
75 | | - sync_context.emit_log.assert_called_once_with( |
76 | | - "RECORD_SKIPPED", |
77 | | - hope_id="1", |
78 | | - error=error_message, |
79 | | - ) |
80 | | - assert not hasattr(mock_model.objects, "bulk_update") or not mock_model.objects.bulk_update.called |
| 163 | +def test_assign_parents( |
| 164 | + mocker: MockerFixture, |
| 165 | + sync_geo: SyncContextGeo, |
| 166 | + mock_model: Mock, |
| 167 | + child_ok: bool, |
| 168 | + parent_ok: bool, |
| 169 | + bulk_exc: bool, |
| 170 | + expected_bulk_calls: int, |
| 171 | + expected_logs: list[str], |
| 172 | +) -> None: |
| 173 | + child_id, parent_id = "c1", "p1" |
| 174 | + mapping = {child_id: parent_id} |
| 175 | + child_inst = mocker.Mock(hope_id=child_id) |
| 176 | + parent_inst = mocker.Mock(hope_id=parent_id) |
| 177 | + |
| 178 | + def fake_get(*, hope_id): |
| 179 | + instances = { |
| 180 | + **({child_id: child_inst} if child_ok else {}), |
| 181 | + **({parent_id: parent_inst} if parent_ok else {}), |
| 182 | + } |
| 183 | + obj = instances.get(hope_id) |
| 184 | + if obj is None: |
| 185 | + raise mock_model.DoesNotExist |
| 186 | + return obj |
| 187 | + |
| 188 | + mocker.patch.object(mock_model.objects, "get", side_effect=fake_get) |
| 189 | + |
| 190 | + m_bulk = mocker.patch.object(mock_model.objects, "bulk_update") |
| 191 | + if bulk_exc: |
| 192 | + m_bulk.side_effect = InvalidMove("boom") |
81 | 193 |
| 194 | + m_log = mocker.patch.object(sync_geo, "emit_log") |
82 | 195 |
83 | | -def test_assign_parents_invalid_move(mock_model: Mock, mocker: MockerFixture) -> None: |
84 | | - child_instance = Mock() |
85 | | - parent_instance = Mock() |
86 | | - mock_model.objects.get.side_effect = [child_instance, parent_instance] |
87 | | - mock_model.objects.bulk_update.side_effect = InvalidMove("Invalid tree move") |
88 | | - mocker.patch.object(SyncContextGeo, "emit_log") |
| 196 | + sync_geo._assign_parents(mock_model, mapping) |
89 | 197 |
90 | | - sync_context = SyncContextGeo(client=mocker.Mock(), stdout=mocker.Mock()) |
| 198 | + assert m_bulk.call_count == expected_bulk_calls |
| 199 | + keys = [call.args[0] for call in m_log.call_args_list] |
| 200 | + assert keys == expected_logs |
91 | 201 |
92 | | - parent_mapping = {"1": "2"} |
93 | | - sync_context._assign_parents(mock_model, parent_mapping) |
94 | 202 |
95 | | - mock_model._meta.model_name = "test_model" |
96 | | - sync_context.emit_log.assert_called_once_with( |
97 | | - "RECORD_SYNC_FAILURE", |
98 | | - LogLevel.ERROR, |
99 | | - hope_id="multiple", |
100 | | - error="Invalid MPTT move during bulk update for 'test_model': Invalid tree move", |
| 203 | +def test_sync_context_geo_invokes_sync_context(mocker: MockerFixture) -> None: |
| 204 | + fake_result = {"ok": True} |
| 205 | +    mock_sync_context = mocker.patch( |
| 206 | + "country_workspace.contrib.hope.sync.context_geo.sync_context", |
| 207 | + return_value=fake_result, |
101 | 208 | ) |
| 209 | + stdout = StringIO() |
102 | 210 |
| 211 | + result = sync_context_geo(step=SyncStep.AREAS, stdout=stdout) |
103 | 212 |
104 | | -def test_sync_context_geo_step(mock_model: Mock, mocker: MockerFixture) -> None: |
105 | | - mocker.patch.object(SyncContextGeo, "sync_countries") |
106 | | - result = sync_context_geo(step=SyncStep.COUNTRIES, stdout=mocker.Mock()) |
107 | | - SyncContextGeo.sync_countries.assert_called_once() |
108 | | - assert isinstance(result, dict) |
| 213 | +    mock_sync_context.assert_called_once_with(SyncContextGeo, step=SyncStep.AREAS, stdout=stdout) |
| 214 | + assert result is fake_result |
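
These tests rely on two conftest fixtures that are not part of this diff, `base_sync` and `mock_model`. Below is a minimal sketch of what such a conftest could provide, inferred from how the tests use the fixtures; the fixture names come from the diff, but their bodies are assumptions, not the project's actual conftest.

```python
# Hypothetical conftest sketch -- assumed, inferred from the tests above.
from io import StringIO
from unittest.mock import MagicMock

import pytest


@pytest.fixture
def base_sync() -> MagicMock:
    # Stand-in exposing only what the sync_geo fixture reads: .client and .stdout.
    sync = MagicMock()
    sync.client = MagicMock()
    sync.stdout = StringIO()
    return sync


class _DoesNotExist(Exception):
    """Real exception class so `raise mock_model.DoesNotExist` works in fake_get()."""


@pytest.fixture
def mock_model() -> MagicMock:
    # Model stand-in with a usable DoesNotExist and a model_name for log messages.
    model = MagicMock()
    model.DoesNotExist = _DoesNotExist
    model._meta.model_name = "test_model"
    return model
```

The one detail that matters for `test_assign_parents` is that `mock_model.DoesNotExist` is a real exception class: `fake_get()` raises it directly, and raising a plain Mock attribute would fail with `TypeError: exceptions must derive from BaseException`.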