
Commit d962d7e

rename fetch_doi to fetch_doi_prefix
1 parent 1b9b183 commit d962d7e
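In caller terms this is a pure rename of the public entrypoint. A minimal before/after sketch of the caller-side change (the caller code is illustrative; the names come from the diff below):

    # Before this commit
    from datacite import fetch_doi
    dois = fetch_doi(prefixes=["10.7303"], user_agent_mailto="user@example.com")

    # After this commit
    from datacite import fetch_doi_prefix
    dois = fetch_doi_prefix(prefixes=["10.7303"], user_agent_mailto="user@example.com")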

File tree: 3 files changed, +27 -27 lines


src/datacite/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -1,10 +1,10 @@
 """DataCite utilities package."""
 from .datacite import (
-    fetch_doi,
+    fetch_doi_prefix,
     write_ndjson_gz,
 )
 
 __all__ = [
-    "fetch_doi",
+    "fetch_doi_prefix",
     "write_ndjson_gz",
 ]

src/datacite/datacite.py

Lines changed: 6 additions & 6 deletions
@@ -5,11 +5,11 @@
 It handles pagination, retry logic, and efficient streaming to minimize memory usage.
 
 Primary Entrypoint:
-    fetch_doi: Iterator that automatically fetches all DOI objects matching the
+    fetch_doi_prefix: Iterator that automatically fetches all DOI objects matching the
         specified criteria. This is the main function most users will need.
 
 Public Functions:
-    fetch_doi: Fetch all DOI objects from DataCite API with automatic pagination.
+    fetch_doi_prefix: Fetch all DOI objects from DataCite API with automatic pagination.
         Returns an iterator yielding individual DOI objects. Handles session
         management and pagination transparently.
 

@@ -19,10 +19,10 @@
 Example:
     Basic usage to fetch and save Synapse DOIs:
 
-    >>> from datacite import fetch_doi, write_ndjson_gz
+    >>> from datacite import fetch_doi_prefix, write_ndjson_gz
     >>>
    >>> # Fetch all findable DOIs with prefix 10.7303
-    >>> dois = fetch_doi(
+    >>> dois = fetch_doi_prefix(
     ...     prefixes=["10.7303"],
     ...     state="findable",
     ...     user_agent_mailto="user@example.com"

@@ -271,7 +271,7 @@ def _fetch_doi_page(
     return result
 
 
-def fetch_doi(
+def fetch_doi_prefix(
     prefixes: List[str],
     state: str = "findable",
     page_size: int = 1000,

@@ -302,7 +302,7 @@ def fetch_doi(
         requests.HTTPError: If any API request fails with non-retryable error.
 
     Example:
-        >>> for doi in fetch_doi(["10.7303"], user_agent_mailto="user@example.com"):
+        >>> for doi in fetch_doi_prefix(["10.7303"], user_agent_mailto="user@example.com"):
         ...     print(doi["id"])
     """
     _validate_fetch_params(page_size, state)
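Together with write_ndjson_gz, which the package exports alongside it, the renamed entrypoint covers the fetch-and-save flow the module docstring describes. A minimal end-to-end sketch, assuming write_ndjson_gz takes an iterable of DOI dicts and an output path (its exact signature is not shown in this diff):

    from datacite import fetch_doi_prefix, write_ndjson_gz

    # Lazily iterate every findable DOI under the Synapse prefix; pagination,
    # retries, and session handling happen inside fetch_doi_prefix.
    dois = fetch_doi_prefix(
        prefixes=["10.7303"],
        state="findable",
        user_agent_mailto="user@example.com",
    )

    # Stream the results into a gzipped NDJSON file (call signature assumed).
    write_ndjson_gz(dois, "synapse_dois.ndjson.gz")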

tests/datacite/test_datacite.py

Lines changed: 19 additions & 19 deletions
@@ -25,7 +25,7 @@
     _should_continue_pagination,
     _serialize_to_ndjson,
     _fetch_doi_page,
-    fetch_doi,
+    fetch_doi_prefix,
     write_ndjson_gz,
 )
 

@@ -710,7 +710,7 @@ def test_valid_states(self, prefixes, state, create_mock_response):
 
 
 class TestFetchDoi:
-    """Tests for fetch_doi function.
+    """Tests for fetch_doi_prefix function.
 
     Tests full pagination flow with generator behavior.
     """

@@ -723,7 +723,7 @@ def test_single_full_page(self, prefixes, sample_doi_objects, mocker):
             {"data": []}  # Empty next page
         ]
 
-        results = list(fetch_doi(
+        results = list(fetch_doi_prefix(
             prefixes=prefixes,
             state="findable",
             page_size=10,

@@ -749,7 +749,7 @@ def test_multiple_pages(self, prefixes, mocker):
             {"data": page3_data},  # Partial page - should stop
         ]
 
-        results = list(fetch_doi(
+        results = list(fetch_doi_prefix(
             prefixes=prefixes,
             state="findable",
             page_size=10,

@@ -765,7 +765,7 @@ def test_empty_results(self, prefixes, mocker):
         mock_fetch_page = mocker.patch("src.datacite.datacite._fetch_doi_page")
         mock_fetch_page.return_value = {"data": []}
 
-        results = list(fetch_doi(
+        results = list(fetch_doi_prefix(
             prefixes=prefixes,
             state="findable",
             page_size=10,

@@ -789,7 +789,7 @@ def test_user_agent_header_set(self, prefixes, mocker):
         mock_update = MagicMock()
         mock_session.headers.update = mock_update
 
-        list(fetch_doi(
+        list(fetch_doi_prefix(
             prefixes=prefixes,
             state="findable",
             page_size=10,

@@ -807,7 +807,7 @@ def test_start_page_parameter(self, prefixes, mocker):
         mock_fetch_page = mocker.patch("src.datacite.datacite._fetch_doi_page")
         mock_fetch_page.return_value = {"data": [{"id": "test"}]}
 
-        list(fetch_doi(
+        list(fetch_doi_prefix(
             prefixes=prefixes,
             state="findable",
             page_size=10,

@@ -830,7 +830,7 @@ def test_stops_on_partial_page(self, prefixes, mocker):
             {"data": page2_data}
         ]
 
-        results = list(fetch_doi(
+        results = list(fetch_doi_prefix(
             prefixes=prefixes,
             page_size=10
         ))

@@ -850,7 +850,7 @@ def test_handles_api_error_mid_pagination(self, prefixes, mocker):
         ]
 
         with pytest.raises(requests.HTTPError):
-            list(fetch_doi(
+            list(fetch_doi_prefix(
                 prefixes=prefixes,
                 page_size=10
             ))

@@ -861,7 +861,7 @@ def test_response_with_missing_data_key(self, prefixes, mocker):
         # API returns response without 'data' key
         mock_fetch_page.return_value = {"meta": {"total": 0}, "links": {}}
 
-        results = list(fetch_doi(
+        results = list(fetch_doi_prefix(
             prefixes=prefixes,
             page_size=10
         ))

@@ -881,7 +881,7 @@ def test_no_user_agent_when_mailto_none(self, prefixes, mocker):
         mock_update = MagicMock()
         mock_session.headers.update = mock_update
 
-        list(fetch_doi(
+        list(fetch_doi_prefix(
             prefixes=prefixes,
             state="findable",
             page_size=10,

@@ -902,7 +902,7 @@ def test_large_page_size_boundary(self, prefixes, mocker):
             {"data": []}  # No more data
         ]
 
-        results = list(fetch_doi(
+        results = list(fetch_doi_prefix(
             prefixes=prefixes,
             page_size=1000  # Maximum page size
         ))

@@ -914,23 +914,23 @@ def test_large_page_size_boundary(self, prefixes, mocker):
     def test_invalid_page_size_zero(self, prefixes):
         """Test that page_size=0 raises ValueError."""
         with pytest.raises(ValueError, match="page_size must be at least 1"):
-            list(fetch_doi(
+            list(fetch_doi_prefix(
                 prefixes=prefixes,
                 page_size=0
             ))
 
     def test_invalid_page_size_negative(self, prefixes):
         """Test that negative page_size raises ValueError."""
         with pytest.raises(ValueError, match="page_size must be at least 1"):
-            list(fetch_doi(
+            list(fetch_doi_prefix(
                 prefixes=prefixes,
                 page_size=-5
             ))
 
     def test_invalid_page_size_exceeds_maximum(self, prefixes):
         """Test that page_size > 1000 raises ValueError."""
         with pytest.raises(ValueError, match="page_size cannot exceed 1000"):
-            list(fetch_doi(
+            list(fetch_doi_prefix(
                 prefixes=prefixes,
                 page_size=2000
             ))

@@ -941,19 +941,19 @@ def test_page_size_boundary_values(self, prefixes, mocker):
         mock_fetch_page.return_value = {"data": []}
 
         # page_size=1 should work
-        list(fetch_doi(prefixes=prefixes, page_size=1))
+        list(fetch_doi_prefix(prefixes=prefixes, page_size=1))
         assert mock_fetch_page.called
 
         mock_fetch_page.reset_mock()
 
         # page_size=1000 should work
-        list(fetch_doi(prefixes=prefixes, page_size=1000))
+        list(fetch_doi_prefix(prefixes=prefixes, page_size=1000))
         assert mock_fetch_page.called
 
     def test_invalid_state(self, prefixes):
         """Test that invalid state raises ValueError."""
         with pytest.raises(ValueError, match="state must be one of"):
-            list(fetch_doi(
+            list(fetch_doi_prefix(
                 prefixes=prefixes,
                 state="invalid_state"
             ))

@@ -965,7 +965,7 @@ def test_valid_states(self, prefixes, state, mocker):
         mock_fetch_page.return_value = {"data": []}
 
         # Should not raise ValueError
-        list(fetch_doi(prefixes=prefixes, state=state))
+        list(fetch_doi_prefix(prefixes=prefixes, state=state))
         assert mock_fetch_page.called
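The renamed tests above all follow one pattern: patch the internal page fetcher and drive the public generator. A condensed, illustrative sketch of that pattern with fetch_doi_prefix (the patch target and the mocker fixture come from the diff; the page contents and exact assertions are assumed):

    # Assumes the imports shown in the hunk at the top of the test file
    # (fetch_doi_prefix, _fetch_doi_page, etc.) and the pytest-mock "mocker" fixture.
    def test_stops_after_partial_page(mocker):
        # Patch the internal page fetcher so no real HTTP requests are made.
        mock_fetch_page = mocker.patch("src.datacite.datacite._fetch_doi_page")
        mock_fetch_page.side_effect = [
            {"data": [{"id": f"10.7303/syn{i}"} for i in range(10)]},  # full page
            {"data": [{"id": "10.7303/syn10"}]},                       # partial page
        ]

        results = list(fetch_doi_prefix(prefixes=["10.7303"], page_size=10))

        assert len(results) == 11               # items from both pages are yielded
        assert mock_fetch_page.call_count == 2  # no request after the partial page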