Wrap requests exceptions

Mads Marquart
2020-01-15 12:17:16 +01:00
parent 8f25a3bae8
commit e25f53d9a9
4 changed files with 77 additions and 17 deletions

View File

@@ -1203,16 +1203,19 @@ class Client:
self._parse_message(content)
except KeyboardInterrupt:
return False
except requests.Timeout:
pass
except requests.ConnectionError:
# If the client has lost their internet connection, keep trying every 30 seconds
time.sleep(30)
except _exception.HTTPError as e:
cause = e.__cause__
# Fix 502 and 503 pull errors
if e.status_code in [502, 503]:
# Bump pull channel, while constraining within 0-4
self._pull_channel = (self._pull_channel + 1) % 5
# TODO: Handle these exceptions better
elif isinstance(cause, requests.ReadTimeout):
pass # Expected
elif isinstance(cause, (requests.ConnectTimeout, requests.ConnectionError)):
# If the client has lost their internet connection, keep trying every 30 seconds
time.sleep(30)
else:
raise e
except Exception as e:
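
The branch on e.__cause__ above works because the new handler (added in the next file) re-raises with "raise ... from e", which stores the original requests exception on __cause__. A small, self-contained illustration of that chaining, not library code:

    import requests

    class WrappedError(Exception):
        pass

    try:
        try:
            raise requests.ReadTimeout("read timed out")
        except requests.RequestException as original:
            # "from original" chains the requests exception onto __cause__
            raise WrappedError("Requests error") from original
    except WrappedError as e:
        # This is the attribute the pull loop inspects to pick a retry strategy
        assert isinstance(e.__cause__, requests.ReadTimeout)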

View File

@@ -1,4 +1,5 @@
import attr
import requests
# Not frozen, since that doesn't work in PyPy
attrs_exception = attr.s(slots=True, auto_exc=True)
@@ -133,3 +134,18 @@ def handle_http_error(code):
)
if 400 <= code < 600:
raise HTTPError(msg, status_code=code)
def handle_requests_error(e):
if isinstance(e, requests.ConnectionError):
raise HTTPError("Connection error") from e
if isinstance(e, requests.HTTPError):
pass # Raised when using .raise_for_status, so should never happen
if isinstance(e, requests.URLRequired):
pass # Should never happen, we always provide valid URLs
if isinstance(e, requests.TooManyRedirects):
pass # TODO: Consider using allow_redirects=False to prevent this
if isinstance(e, requests.Timeout):
pass # Should never happen, we don't set timeouts
raise HTTPError("Requests error") from e

View File

@@ -157,7 +157,12 @@ class Session:
"""
session = session_factory()
soup = find_input_fields(session.get("https://m.facebook.com/").text)
try:
r = session.get("https://m.facebook.com/")
except requests.RequestException as e:
_exception.handle_requests_error(e)
soup = find_input_fields(r.text)
data = dict(
(elem["name"], elem["value"])
for elem in soup
@@ -167,7 +172,11 @@ class Session:
data["pass"] = password
data["login"] = "Log In"
r = session.post("https://m.facebook.com/login.php?login_attempt=1", data=data)
try:
url = "https://m.facebook.com/login.php?login_attempt=1"
r = session.post(url, data=data)
except requests.RequestException as e:
_exception.handle_requests_error(e)
# Usually, 'Checkpoint' will refer to 2FA
if "checkpoint" in r.url and ('id="approvals_code"' in r.text.lower()):
@@ -176,11 +185,17 @@ class Session:
"2FA code required, please add `on_2fa_callback` to .login"
)
code = on_2fa_callback()
try:
r = _2fa_helper(session, code, r)
except requests.RequestException as e:
_exception.handle_requests_error(e)
# Sometimes Facebook tries to show the user a "Save Device" dialog
if "save-device" in r.url:
try:
r = session.get("https://m.facebook.com/login/save-device/cancel/")
except requests.RequestException as e:
_exception.handle_requests_error(e)
if is_home(r.url):
return cls._from_session(session=session)
@@ -198,7 +213,10 @@ class Session:
"""
# Send a request to the login url, to see if we're directed to the home page
url = "https://m.facebook.com/login.php?login_attempt=1"
try:
r = self._session.get(url, allow_redirects=False)
except requests.RequestException as e:
_exception.handle_requests_error(e)
return "Location" in r.headers and is_home(r.headers["Location"])
def logout(self):
@@ -209,20 +227,28 @@ class Session:
logout_h = self._logout_h
if not logout_h:
url = _util.prefix_url("/bluebar/modern_settings_menu/")
try:
h_r = self._session.post(url, data={"pmid": "4"})
except requests.RequestException as e:
_exception.handle_requests_error(e)
logout_h = re.search(r'name=\\"h\\" value=\\"(.*?)\\"', h_r.text).group(1)
url = _util.prefix_url("/logout.php")
try:
r = self._session.get(url, params={"ref": "mb", "h": logout_h})
if not r.ok:
raise exception.HTTPError("Failed logging out", status_code=r.status_code)
except requests.RequestException as e:
_exception.handle_requests_error(e)
handle_http_error(r.status_code)
@classmethod
def _from_session(cls, session):
# TODO: Automatically set user_id when the cookie changes in the session
user_id = get_user_id(session)
try:
r = session.get(_util.prefix_url("/"))
except requests.RequestException as e:
_exception.handle_requests_error(e)
soup = find_input_fields(r.text)
@@ -270,13 +296,19 @@ class Session:
def _get(self, url, params, error_retries=3):
params.update(self._get_params())
try:
r = self._session.get(_util.prefix_url(url), params=params)
except requests.RequestException as e:
_exception.handle_requests_error(e)
content = _util.check_request(r)
return _util.to_json(content)
def _post(self, url, data, files=None, as_graphql=False):
data.update(self._get_params())
try:
r = self._session.post(_util.prefix_url(url), data=data, files=files)
except requests.RequestException as e:
_exception.handle_requests_error(e)
content = _util.check_request(r)
if as_graphql:
return _graphql.response_to_json(content)

View File

@@ -1,4 +1,5 @@
import pytest
import requests
from fbchat import (
FacebookError,
HTTPError,
@@ -13,6 +14,7 @@ from fbchat._exception import (
handle_payload_error,
handle_graphql_errors,
handle_http_error,
handle_requests_error,
)
@@ -123,3 +125,10 @@ def test_handle_http_error_404_handling():
def test_handle_http_error_no_error():
assert handle_http_error(200) is None
assert handle_http_error(302) is None
def test_handle_requests_error():
with pytest.raises(HTTPError, match="Connection error"):
handle_requests_error(requests.ConnectionError())
with pytest.raises(HTTPError, match="Requests error"):
handle_requests_error(requests.RequestException())
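
Since the handler chains the original exception, a test could also assert that the cause is preserved. A possible extra check in the same style, reusing the imports above; it is not part of this commit:

    def test_handle_requests_error_preserves_cause():
        original = requests.ConnectionError()
        with pytest.raises(HTTPError, match="Connection error") as excinfo:
            handle_requests_error(original)
        assert excinfo.value.__cause__ is original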