@@ -23,10 +23,12 @@
 from datetime import date, datetime, timedelta
 import json
 import re
+
 from typing import Union, Optional
 from queue import Queue
 from threading import Thread
 import requests.utils
+from requests.exceptions import HTTPError

 import sonar.logging as log
 import sonar.platform as pf
@@ -702,12 +704,15 @@ def __search_thread(queue: Queue) -> None: |
         page_params = params.copy()
         page_params["p"] = page
         log.debug("Threaded issue search params = %s", str(page_params))
-        data = json.loads(endpoint.get(api, params=page_params).text)
-        for i in data["issues"]:
-            i["branch"] = page_params.get("branch", None)
-            i["pullRequest"] = page_params.get("pullRequest", None)
-            issue_list[i["key"]] = get_object(endpoint=endpoint, key=i["key"], data=i)
-        log.debug("Added %d issues in threaded search page %d", len(data["issues"]), page)
+        try:
+            data = json.loads(endpoint.get(api, params=page_params).text)
+            for i in data["issues"]:
+                i["branch"] = page_params.get("branch", None)
+                i["pullRequest"] = page_params.get("pullRequest", None)
+                issue_list[i["key"]] = get_object(endpoint=endpoint, key=i["key"], data=i)
+            log.debug("Added %d issues in threaded search page %d", len(data["issues"]), page)
+        except HTTPError as e:
+            log.critical("HTTP Error while searching issues, search may be incomplete: %s", str(e))
         queue.task_done()
|
|
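For context, the pattern this hunk introduces can be sketched on its own: the worker catches HTTPError so a single failed page is logged rather than killing the thread, and queue.task_done() still runs so a caller blocked on queue.join() completes. This is a minimal illustration, not the sonar-tools internals; the _search_worker name and the tuple layout pulled from the queue are assumptions for the sketch.

    import json
    from queue import Queue
    from requests.exceptions import HTTPError

    def _search_worker(queue: Queue, issue_list: dict) -> None:
        # Hypothetical stand-in for __search_thread(); 'endpoint' is any object
        # whose .get() can raise requests.exceptions.HTTPError on a failed call.
        while not queue.empty():
            endpoint, api, params, page = queue.get()  # assumed tuple layout
            page_params = params.copy()
            page_params["p"] = page
            try:
                data = json.loads(endpoint.get(api, params=page_params).text)
                for issue in data["issues"]:
                    issue_list[issue["key"]] = issue
            except HTTPError as err:
                # One bad page is reported, not fatal: the loop moves on.
                print(f"HTTP Error, search may be incomplete: {err}")
            # Acknowledge the work item even on failure, otherwise
            # queue.join() in the dispatcher would block forever.
            queue.task_done()

One design note: the patch leaves queue.task_done() after the try/except rather than inside it, so the item is acknowledged whether or not the page fetch succeeded; moving it into a finally clause would extend that guarantee to exception types other than HTTPError as well.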