-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathtest_app.py
More file actions
147 lines (111 loc) · 6.31 KB
/
test_app.py
File metadata and controls
147 lines (111 loc) · 6.31 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
import os
import pytest
import requests
from t_res.utils.dataclasses import SentenceMentions, Candidates, Predictions
from .app_fixtures import dict_candidates
# Base URL of the locally running T-Res API instance exercised by these tests.
API_URL = "http://127.0.0.1:8123"
# Alternative: target a deployed instance via the HOST_URL environment variable.
# API_URL = f"http://{os.getenv('HOST_URL')}:8000/v2/t-res_deezy_reldisamb-wpubl-wmtops"
@pytest.mark.skip(reason="integration test")
def test_root():
    """The root endpoint responds 200 with a JSON object containing a 'Title' key."""
    response = requests.get(f'{API_URL}/')
    assert response.status_code == 200
    # Membership test directly on the dict; `.keys()` was redundant.
    assert 'Title' in response.json()
@pytest.mark.skip(reason="integration test")
def test_health():
    """The /health endpoint reports an 'ok' status."""
    resp = requests.get(f'{API_URL}/health')
    assert resp.status_code == 200
    assert resp.json() == {'status': 'ok'}
@pytest.mark.skip(reason="integration test")
def test_run_ner():
    """Call /run_ner and check both the raw JSON and its deserialised form."""
    payload = {"text": "Harvey, from London;Thomas and Elizabeth, Barnett."}
    expected = [{'sentence': {'sentence': 'Harvey, from London;Thomas and Elizabeth, Barnett.'}, 'mentions': [{'sort_index': 13, 'mention': 'London', 'start_offset': 3, 'end_offset': 3, 'start_char': 13, 'ner_score': 0.997, 'ner_label': 'LOC', 'entity_link': 'O'}]}]
    resp = requests.get(f'{API_URL}/run_ner', json=payload)
    assert resp.status_code == 200
    assert resp.json() == expected
    # Round-trip the response through the dataclass deserialiser.
    sentences = SentenceMentions.from_json(resp.json())
    assert len(sentences) == 1
    first = sentences[0]
    assert first.sentence.sentence == payload['text']
    assert len(first.mentions) == 1
    assert first.mentions[0].mention == "London"
@pytest.mark.skip(reason="integration test")
def test_run_candidate_selection():
    """Exercise /run_candidate_selection with and without place-of-publication info."""
    sentence_text = 'Harvey, from London;Thomas and Elizabeth, Barnett.'
    body = {"sentence_mentions": [{'sentence': {'sentence': sentence_text}, 'mentions': [{'sort_index': 13, 'mention': 'London', 'start_offset': 3, 'end_offset': 3, 'start_char': 13, 'ner_score': 0.997, 'ner_label': 'LOC', 'entity_link': 'O'}]}]}
    resp = requests.get(f'{API_URL}/run_candidate_selection', json=body)
    assert resp.status_code == 200
    # Deserialise and inspect: no place-of-publication metadata supplied yet.
    cands = Candidates.from_dict(resp.json())
    assert cands.text() == sentence_text
    assert cands.place_of_pub_wqid() is None
    assert cands.place_of_pub() is None
    mention_candidates = cands.candidates()
    assert len(mention_candidates) == 1
    london = mention_candidates[0]
    assert london.mention.mention == "London"
    best = london.best_string_match()
    assert best.variation == "London"
    assert best.string_similarity == 1.0
    # Repeat with explicit place-of-publication metadata.
    body['place_of_pub_wqid'] = 'Q203349'
    body['place_of_pub'] = 'Poole, Dorset'
    resp = requests.get(f'{API_URL}/run_candidate_selection', json=body)
    assert resp.status_code == 200
    # The metadata should now survive the round trip.
    cands = Candidates.from_dict(resp.json())
    assert cands.text() == sentence_text
    assert cands.place_of_pub_wqid() == 'Q203349'
    assert cands.place_of_pub() == 'Poole, Dorset'
@pytest.mark.skip(reason="integration test")
def test_run_disambiguation():
    """Exercise /run_disambiguation using the shared candidate fixture."""
    resp = requests.get(f'{API_URL}/run_disambiguation', json={"candidates": dict_candidates})
    assert resp.status_code == 200
    preds = Predictions.from_dict(resp.json())
    # The test server is configured to require place-of-publication
    # info for disambiguation, so both fields are populated.
    assert preds.place_of_pub_wqid() == 'Q203349'
    assert preds.place_of_pub() == 'Poole, Dorset'
    candidates = preds.candidates()
    assert len(candidates) == 1
    london = candidates[0]
    assert london.mention.mention == "London"
    match = london.best_string_match()
    assert match.variation == "London"
    assert match.string_similarity == 1.0
    assert london.best_wqid() == 'Q84'
    assert london.best_disambiguation_score() == pytest.approx(0.894, 1e-3)
@pytest.mark.skip(reason="integration test")
def test_run_pipeline():
    """Run the full pipeline on a sentence containing three toponyms."""
    body = {
        "text": "A remarkable case of rattening has just occurred in the building trade at Sheffield, but also in Leeds. Not in London, though.",
        "place_of_pub_wqid": 'Q84',
        "place_of_pub": 'London',
    }
    resp = requests.get(f'{API_URL}/run_pipeline', json=body)
    assert resp.status_code == 200
    preds = Predictions.from_dict(resp.json())
    assert preds.place_of_pub_wqid() == 'Q84'
    assert preds.place_of_pub() == 'London'
    assert len(preds.candidates()) == 3
    # Sheffield, Leeds, London
    assert preds.best_wqids() == ['Q42448', 'Q39121', 'Q84']
@pytest.mark.skip(reason="integration test")
def test_test_pipeline():
    """The /test endpoint returns a payload deserialisable as Predictions."""
    resp = requests.get(f'{API_URL}/test')
    assert resp.status_code == 200
    # We only check that deserialisation succeeds, not the contents.
    assert isinstance(Predictions.from_dict(resp.json()), Predictions)
### Legacy endpoints (pre-v2 API):
@pytest.mark.skip(reason="integration test")
def test_t_res():
    """Legacy /toponym_resolution endpoint: full resolution of a single mention."""
    body = {
        "sentence": "A remarkable case of rattening has just occurred in the building trade at Newtown.",
        "place": "Powys",
        "place_wqid": "Q156150",
    }
    expected = [{
        'mention': 'Newtown',
        'ner_score': 0.996,
        'pos': 74,
        'sent_idx': 0,
        'end_pos': 81,
        'tag': 'LOC',
        'sentence': 'A remarkable case of rattening has just occurred in the building trade at Newtown.',
        'prediction': 'Q669171',
        'ed_score': 0.034,
        'cross_cand_score': {'Q669171': 0.41, 'Q1851145': 0.298, 'Q5355774': 0.143, 'Q738356': 0.107, 'Q15262210': 0.024, 'Q7020654': 0.018, 'Q18748305': 0.0},
        'prior_cand_score': {'Q1851145': 0.86, 'Q669171': 0.734, 'Q5355774': 0.537, 'Q738356': 0.516, 'Q15262210': 0.485, 'Q7020654': 0.483, 'Q18748305': 0.476},
        'latlon': [52.5132, -3.3141],
        'wkdt_class': 'Q3957',
    }]
    resp = requests.get(f'{API_URL}/toponym_resolution', json=body)
    assert resp.status_code == 200
    assert resp.json() == expected
@pytest.mark.skip(reason="integration test")
def test_ner():
    """Legacy /ner endpoint: a single LOC entity is recognised in the sentence."""
    body = {"sentence": "Harvey, from London;Thomas and Elizabeth, Barnett."}
    expected = [{
        "entity": "B-LOC",
        "score": 0.990628182888031,
        "word": "London",
        "start": 13,
        "end": 19,
    }]
    resp = requests.get(f'{API_URL}/ner', json=body)
    assert resp.status_code == 200
    assert resp.json() == expected
#