exceptions.py
from functools import wraps

from requests import HTTPError

from ..legacy import APIError


def _get_http_error_msg(exc):
    """Extract a readable message from an HTTPError, falling back to str(exc)."""
    if isinstance(exc, HTTPError):
        try:
            payload = exc.response.json()
        except ValueError:
            payload = None
        if payload and isinstance(payload, dict):
            message = payload.get('message')
            if message:
                return message
        elif exc.response.text:
            return exc.response.text
    return str(exc)


class ScrapinghubAPIError(Exception):
    """Base exception class."""

    def __init__(self, message=None, http_error=None):
        self.http_error = http_error
        if not message:
            message = _get_http_error_msg(http_error)
        super().__init__(message)


class BadRequest(ScrapinghubAPIError):
    """Usually raised in case of a 400 response from the API."""


class Unauthorized(ScrapinghubAPIError):
    """The request lacks valid authentication credentials for the target resource."""


class Forbidden(ScrapinghubAPIError):
    """You don't have permission to access the requested resource.

    It is either read-protected or not readable by the server.
    """


class NotFound(ScrapinghubAPIError):
    """Entity doesn't exist (e.g. spider or project)."""


class ValueTooLarge(ScrapinghubAPIError):
    """Value cannot be written because it exceeds size limits."""


class DuplicateJobError(ScrapinghubAPIError):
    """A job for the given spider with the given arguments is already scheduled or running."""


class ServerError(ScrapinghubAPIError):
    """Indicates a server error: something unexpected has happened."""


def _wrap_http_errors(method):
    """Internal helper: translate HTTPError and legacy APIError raised by
    ``method`` into the typed exceptions defined above."""
    @wraps(method)
    def wrapped(*args, **kwargs):
        try:
            return method(*args, **kwargs)
        except HTTPError as exc:
            status_code = exc.response.status_code
            if status_code == 400:
                raise BadRequest(http_error=exc)
            elif status_code == 401:
                raise Unauthorized(http_error=exc)
            elif status_code == 403:
                raise Forbidden(http_error=exc)
            elif status_code == 404:
                raise NotFound(http_error=exc)
            elif status_code == 413:
                raise ValueTooLarge(http_error=exc)
            elif 400 <= status_code < 500:
                raise ScrapinghubAPIError(http_error=exc)
            elif 500 <= status_code < 600:
                raise ServerError(http_error=exc)
            raise
        except APIError as exc:
            msg = exc.args[0]
            if exc._type == APIError.ERR_NOT_FOUND:
                raise NotFound(msg)
            elif exc._type == APIError.ERR_VALUE_ERROR:
                raise ValueError(msg)
            elif exc._type == APIError.ERR_BAD_REQUEST:
                raise BadRequest(msg)
            elif exc._type == APIError.ERR_AUTH_ERROR:
                raise Unauthorized(http_error=exc)
            elif exc._type == APIError.ERR_SERVER_ERROR:
                raise ServerError(http_error=exc)
            raise ScrapinghubAPIError(msg)
    return wrapped
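

if __name__ == '__main__':
    # Illustrative sketch, not part of the original module: it shows how
    # _wrap_http_errors maps a requests.HTTPError onto the typed exceptions
    # above. The ``_fail_with`` helper and its messages are hypothetical;
    # building a Response by hand and setting its private ``_content``
    # attribute is a test-style shortcut used only for demonstration.
    # Because of the relative import at the top, this block only runs when the
    # module is executed as part of its package (e.g. via ``python -m``).
    from requests.models import Response

    @_wrap_http_errors
    def _fail_with(status_code, message):
        # Fake an API response carrying a JSON error payload, then raise the
        # HTTPError that response.raise_for_status() would normally produce.
        response = Response()
        response.status_code = status_code
        response._content = ('{"message": "%s"}' % message).encode('utf-8')
        raise HTTPError(response=response)

    try:
        _fail_with(404, 'resource does not exist')
    except NotFound as exc:
        print('404 mapped to NotFound:', exc)

    try:
        _fail_with(503, 'temporary backend failure')
    except ServerError as exc:
        print('503 mapped to ServerError:', exc)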