Skip to content

Commit 5179e4f

Browse files
Use value_mapper argument
1 parent 6285196 commit 5179e4f

File tree

6 files changed

+61
-77
lines changed

6 files changed

+61
-77
lines changed

src/country_workspace/contrib/kobo/sync.py

Lines changed: 11 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import re
22
from collections.abc import Callable, Iterable
3-
from functools import reduce, partial
3+
from functools import partial
44
from typing import Any, Final, TypedDict, cast
55

66
from constance import config as constance_config
@@ -16,6 +16,7 @@
1616
from country_workspace.models import AsyncJob, Batch, Household, Individual
1717
from country_workspace.utils.config import BatchNameConfig, FailIfAlienConfig
1818
from country_workspace.utils.fields import clean_field_names, TO_UPPERCASE_FIELDS
19+
from country_workspace.utils.functional import compose
1920

2021

2122
class Config(BatchNameConfig, FailIfAlienConfig):
@@ -62,27 +63,18 @@ def normalize_json(data: dict[str, Any]) -> dict[str, Any]:
6263
return {key.split("/")[-1]: value for key, value in data.items()}
6364

6465

65-
EntryProcessor = Callable[[dict], dict]
66+
type RawIndividual = dict[str, Any]
6667

6768

68-
def apply_processor(entry: dict, processor: EntryProcessor) -> dict:
69-
return processor(entry)
70-
71-
72-
def uppercase_fields(fields: tuple[str], entry: dict) -> dict:
73-
for field in fields:
74-
if field in entry:
75-
entry[field] = entry[field].upper()
76-
77-
return entry
78-
79-
80-
def preprocess_individual(individual: dict) -> dict:
81-
processors = (
69+
def preprocess_individual(individual: RawIndividual) -> RawIndividual:
70+
clean: Callable[[RawIndividual], RawIndividual] = partial(
71+
clean_field_names, fields_to_uppercase=FIELDS_TO_UPPERCASE + TO_UPPERCASE_FIELDS
72+
)
73+
processor = compose(
8274
normalize_json,
83-
partial(clean_field_names, fields_to_uppercase=FIELDS_TO_UPPERCASE + TO_UPPERCASE_FIELDS),
75+
clean,
8476
)
85-
return reduce(apply_processor, processors, individual)
77+
return processor(individual)
8678

8779

8880
def get_fullname_key(individual: Iterable[str]) -> str | None:
@@ -98,7 +90,7 @@ def create_individuals(batch: Batch, household: Household, submission: Submissio
9890
Individual(
9991
batch=batch,
10092
household=household,
101-
name=individual.get(fullname, ""),
93+
name=individual.get(fullname, "") if fullname else "",
10294
flex_fields=individual,
10395
),
10496
)

src/country_workspace/datasources/rdi.py

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -68,12 +68,6 @@ def has_household_pk(row: Record) -> bool:
6868
return filter(has_household_pk, sheet)
6969

7070

71-
def postprocess_cell(sheets: MultiSheet) -> MultiSheet:
72-
for sheet_idx, rows in sheets:
73-
formated_rows = ({k: strip_time_iso(v) for k, v in row.items()} for row in rows)
74-
yield sheet_idx, formated_rows
75-
76-
7771
def process_households(sheet: Sheet, job: AsyncJob, batch: Batch, config: Config) -> Mapping[int, Household]:
7872
mapping = {}
7973

@@ -162,7 +156,7 @@ def merge_images(sheet: Sheet, sheet_images: Mapping[int, Mapping[int, str]]) ->
162156

163157

164158
def read_sheets(config: Config, filepath: str, *sheet_indices: int) -> Generator[Sheet, None, None]:
165-
sheets = postprocess_cell(open_xls_multi(filepath, sheets=list(sheet_indices)))
159+
sheets = open_xls_multi(filepath, sheets=list(sheet_indices), value_mapper=strip_time_iso)
166160
sheet_images = extract_images(filepath, *sheet_indices)
167161
for (_, sheet), images in zip(sheets, sheet_images, strict=False):
168162
sheet_with_images = merge_images(sheet, images)
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
from collections.abc import Callable
2+
from functools import reduce, partial
3+
from typing import overload, Any
4+
5+
type Function[T, R] = Callable[[T], R]
6+
7+
8+
def apply[T, R](obj: T, func: Function[T, R]) -> R:
9+
return func(obj)
10+
11+
12+
@overload
13+
def compose[T, R](func0: Function[T, R], /) -> Function[T, R]: ...
14+
15+
16+
@overload
17+
def compose[T, TR0, R](func0: Function[T, TR0], func1: Function[TR0, T], /) -> Function[T, R]: ...
18+
19+
20+
@overload
21+
def compose[T, TR0, TR1, R](
22+
func0: Function[T, TR0], func1: Function[TR0, TR1], func2: Function[TR1, R], /
23+
) -> Function[T, R]: ...
24+
25+
26+
def compose(*funcs: Function[Any, Any]) -> Function[Any, Any]:
27+
return partial(reduce, apply, funcs)

tests/contrib/kobo/test_kobo_sync.py

Lines changed: 4 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,6 @@
1717
import_data,
1818
is_submission_data_url,
1919
make_client,
20-
apply_processor,
21-
uppercase_fields,
2220
FIELDS_TO_UPPERCASE,
2321
preprocess_individual,
2422
get_fullname_key,
@@ -215,23 +213,6 @@ def test_import_data(mocker: MockerFixture, config: Config) -> None:
215213
import_asset_mock.assert_called_once_with(batch_mock, asset_mock, config)
216214

217215

218-
def test_apply_processor() -> None:
219-
entry_mock = Mock()
220-
processor_mock = Mock()
221-
assert apply_processor(entry_mock, processor_mock) == processor_mock.return_value
222-
processor_mock.assert_called_once_with(entry_mock)
223-
224-
225-
def test_uppercase_fields_field_exists() -> None:
226-
result = uppercase_fields((field := "test",), {field: "test"})
227-
assert result[field].isupper()
228-
229-
230-
def test_uppercase_fields_field_does_not_exist() -> None:
231-
result = uppercase_fields((field := "test",), {})
232-
assert field not in result
233-
234-
235216
def test_get_fullname_key_key_exists() -> None:
236217
assert get_fullname_key((key := "full_name",)) == key
237218

@@ -244,14 +225,12 @@ def test_preprocess_individual(mocker: MockerFixture) -> None:
244225
normalize_json_mock = mocker.patch("country_workspace.contrib.kobo.sync.normalize_json")
245226
clean_field_names_mock = mocker.patch("country_workspace.contrib.kobo.sync.clean_field_names")
246227
partial_mock = mocker.patch("country_workspace.contrib.kobo.sync.partial")
247-
reduce_mock = mocker.patch("country_workspace.contrib.kobo.sync.reduce")
248-
apply_processor_mock = mocker.patch("country_workspace.contrib.kobo.sync.apply_processor")
228+
compose_mock = mocker.patch("country_workspace.contrib.kobo.sync.compose")
249229
individual = Mock()
250230

251-
assert preprocess_individual(individual) == reduce_mock.return_value
231+
assert preprocess_individual(individual) == compose_mock.return_value.return_value
252232
partial_mock.assert_called_once_with(
253233
clean_field_names_mock, fields_to_uppercase=FIELDS_TO_UPPERCASE + TO_UPPERCASE_FIELDS
254234
)
255-
reduce_mock.assert_called_once_with(
256-
apply_processor_mock, (normalize_json_mock, partial_mock.return_value), individual
257-
)
235+
compose_mock.assert_called_once_with(normalize_json_mock, partial_mock.return_value)
236+
compose_mock.return_value.assert_called_once_with(individual)

tests/datasources/test_rdi.py

Lines changed: 2 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
merge_images,
2424
read_sheets,
2525
full_name_column,
26-
postprocess_cell,
2726
)
2827
from country_workspace.datasources.utils import strip_time_iso
2928
from country_workspace.models import Household
@@ -294,8 +293,7 @@ def test_merge_images() -> None:
294293

295294
def test_read_sheets(mocker: MockerFixture) -> None:
296295
fake_sheets = ((Mock(), sheet := Mock()),)
297-
postprocess_cell_mock = mocker.patch("country_workspace.datasources.rdi.postprocess_cell")
298-
postprocess_cell_mock.return_value = fake_sheets
296+
strip_time_iso_mock = mocker.patch("country_workspace.datasources.rdi.strip_time_iso")
299297
open_xls_multi_mock = mocker.patch("country_workspace.datasources.rdi.open_xls_multi")
300298
open_xls_multi_mock.return_value = fake_sheets
301299
extract_images_mock = mocker.patch("country_workspace.datasources.rdi.extract_images")
@@ -307,7 +305,7 @@ def test_read_sheets(mocker: MockerFixture) -> None:
307305
result = list(read_sheets(config_mock, filepath := "test", sheet_index := 0))
308306

309307
assert result == [filter_rows_with_household_pk_mock.return_value]
310-
open_xls_multi_mock.assert_called_once_with(filepath, sheets=[sheet_index])
308+
open_xls_multi_mock.assert_called_once_with(filepath, sheets=[sheet_index], value_mapper=strip_time_iso_mock)
311309
extract_images_mock.assert_called_once_with(filepath, sheet_index)
312310
merge_images_mock.assert_called_once_with(sheet, images)
313311
filter_rows_with_household_pk_mock.assert_called_once_with(config_mock, merge_images_mock.return_value)
@@ -339,25 +337,3 @@ def test_full_name_column(record: Record, expected: str | None) -> None:
339337
)
340338
def test_strip_time_iso(inp, expected):
341339
assert strip_time_iso(inp) == expected
342-
343-
344-
def test_postprocess_cell_simple():
345-
sheets = [
346-
(
347-
7,
348-
[
349-
{"a": "2025-05-15 08:00:00", "b": "hello"},
350-
{"a": "nope", "b": "2025-05-16 00:00:00"},
351-
],
352-
)
353-
]
354-
out = list(postprocess_cell(sheets))
355-
idx, rows_gen = out[0]
356-
assert len(out) == 1
357-
assert idx == 7
358-
359-
rows = list(rows_gen)
360-
assert rows == [
361-
{"a": "2025-05-15", "b": "hello"},
362-
{"a": "nope", "b": "2025-05-16"},
363-
]

tests/utils/test_functional.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
from country_workspace.utils.functional import apply, compose
2+
from typing import TYPE_CHECKING
3+
4+
if TYPE_CHECKING:
5+
from collections.abc import Callable
6+
7+
8+
def test_apply() -> None:
    """``apply(obj, func)`` returns exactly ``func(obj)``."""

    def increment(value: int) -> int:
        return value + 1

    assert apply(1, increment) == increment(1)
11+
12+
13+
def test_compose() -> None:
    """``compose`` chains its functions left to right."""

    def increment(value: int) -> int:
        return value + 1

    def double(value: int) -> int:
        return value * 2

    composed = compose(increment, double)
    assert composed(1) == double(increment(1))

0 commit comments

Comments
 (0)