Compare commits

...

59 Commits

Author SHA1 Message Date
Mads Marquart
0761116335 Bump version: 1.9.6 → 1.9.7 2020-06-08 22:20:43 +02:00
Mads Marquart
86d7220126 Merge pull request #586 from qwertyuu/fix-typeerror-v1
Fix AttributeError on login
2020-06-08 22:19:30 +02:00
Raphaël Côté
e175ec791c Update _state.py 2020-05-31 22:38:08 -04:00
Mads Marquart
e54be7583a Merge pull request #547 from Benjamin-Loison/patch-1
Update fetch.py
2020-04-06 12:53:58 +02:00
Benjamin Loison
fdf64597ec Update fetch.py
Variable name mismatched.
2020-04-06 12:46:34 +02:00
Mads Marquart
064707ac23 Add error handling for when the listener has been logged out 2020-01-24 21:19:58 +01:00
Mads Marquart
b9b4d57b25 Bump version: 1.9.5 → 1.9.6 2020-01-21 19:50:57 +01:00
Mads Marquart
b4618739f3 Fix MQTT errors after being offline for too long 2020-01-21 19:39:59 +01:00
Mads Marquart
22c6c82c0e Disable /t_rtc MQTT topic 2020-01-20 14:54:25 +01:00
Mads Marquart
19c875c18a Bump version: 1.9.4 → 1.9.5 2020-01-20 09:32:30 +01:00
Mateusz Soszyński
12bbc0058c Add onPendingMessage (#512) 2020-01-20 09:28:41 +01:00
Mads Marquart
9c81806b95 Bump version: 1.9.3 → 1.9.4 2020-01-14 23:29:58 +01:00
Mads Marquart
45303005b8 Fix onFriendRequest 2020-01-14 23:27:50 +01:00
Mads Marquart
881aa9adce Bump version: 1.9.2 → 1.9.3 2020-01-08 09:38:18 +01:00
Mads Marquart
4714be5697 Fix MQTT JSON decoding 2020-01-08 09:35:26 +01:00
Mads Marquart
cb7f4a72d7 Bump version: 1.9.1 → 1.9.2 2020-01-08 08:47:16 +01:00
Mads Marquart
fb63ff0db8 Fix cookie header extraction
Only worked when the cookies were loaded from file, hence the reason I
didn't spot it the first time. Thanks to @gave92 for the suggestion.

Fixes #495
2020-01-08 08:46:22 +01:00
Mads Marquart
c5f447e20b Bump version: 1.9.0 → 1.9.1 2020-01-06 13:23:39 +01:00
Mads Marquart
b4d3769fd5 Fix MQTT error handling
- Fix "Out of memory" errors
- Fix typo
2020-01-06 13:14:07 +01:00
Mads Marquart
b199d597b2 Bump version: 1.8.3 → 1.9.0 2020-01-06 10:57:19 +01:00
Mads Marquart
debfb37a47 Merge pull request #494 from carpedm20/websocket-mqtt-support
Add MQTT over WebSockets support
2020-01-06 10:51:20 +01:00
Mads Marquart
67fd6ffdf6 Better document MQTT topics 2020-01-06 10:34:39 +01:00
Mads Marquart
e57265016e Skip NoOp events 2020-01-06 10:27:40 +01:00
Mads Marquart
cf4c22898c Add undocumented _onSeen callback
Mostly just to slowly document unknown events
2020-01-06 10:27:11 +01:00
Mads Marquart
3bb99541e7 Improve MQTT connection error reporting 2020-01-05 23:44:19 +01:00
Mads Marquart
8c367af0ff Fix Python 2.7 errors 2020-01-05 20:52:50 +01:00
Mads Marquart
bc1e3edf17 Small fixes
Handle more errors, and fix Client.stopListening
2020-01-05 20:29:44 +01:00
Mads Marquart
e488f4a7da Fix typing status parsing
Co-authored-by: Tulir Asokan <tulir@maunium.net>
2020-01-05 19:57:53 +01:00
Mads Marquart
afad38d8e1 Fix chat timestamp parsing 2020-01-05 19:57:53 +01:00
Mads Marquart
e9804d4184 Fix message parsing 2020-01-05 19:57:53 +01:00
Mads Marquart
a1b80a7abb Replace pull channel with MQTT setup 2020-01-05 19:57:53 +01:00
Mads Marquart
803bfa7084 Add proper MQTT error handling 2020-01-05 19:57:53 +01:00
Mads Marquart
d1cb866b44 Refactor MQTT listening 2020-01-05 19:57:52 +01:00
Mads Marquart
a298e0cf16 Refactor MQTT to do proper reconnecting 2020-01-05 14:56:01 +01:00
Mads Marquart
766b0125fb Refactor MQTT connecting, add sync token support 2020-01-05 00:31:58 +01:00
Mads Marquart
998fa43fb2 Refactor MQTT connecting 2020-01-04 23:18:20 +01:00
Mads Marquart
ecc6edac5a Fix message receiving in MQTT 2020-01-04 16:23:51 +01:00
Mads Marquart
ea518ba4c9 Add initial MQTT helper 2020-01-04 16:23:35 +01:00
Mads Marquart
ffdf4222bf Split ._parseMessage to reduce indentation 2019-12-15 16:24:17 +01:00
Mads Marquart
a97ef67411 Backport e348425 2019-12-15 15:26:53 +01:00
Mads Marquart
813219cd9c Bump version: 1.8.2 → 1.8.3 2019-09-08 15:59:29 +02:00
Asiel Díaz Benítez
bb1f7d9294 Fix mimetypes.guess_type (#471)
`mimetypes.guess_type` fails if the url is something like `http://example.com/file.zip?u=10`.

Backported from 6bffb66
2019-09-08 15:58:34 +02:00
Mads Marquart
3d28c958d3 Bump version: 1.8.1 → 1.8.2 2019-09-05 20:07:44 +02:00
Marco Gavelli
6b68916d74 Fix Python 2 only issue (str.split does not take keyword parameters)
Fixes #469
2019-09-05 20:02:51 +02:00
Mads Marquart
12e752e681 Bump version: 1.8.0 → 1.8.1 2019-08-28 19:21:39 +02:00
Mads Marquart
1f342d0c71 Move Client._getSendData into the Thread / Group models 2019-08-28 18:07:21 +02:00
Mads Marquart
5e86d4a48a Add method to convert a ThreadType to a subclass of Thread (e.g. Group) 2019-08-28 18:07:21 +02:00
Mads Marquart
0838f84859 Move most of Client._getSendData to State._do_send_request 2019-08-28 18:07:21 +02:00
Mads Marquart
abc938eacd Make State.fb_dtsg private 2019-08-28 18:07:21 +02:00
Mads Marquart
4d13cd2c0b Move body of Client._doSendRequest to State 2019-08-28 18:07:21 +02:00
Mads Marquart
8f8971c706 Move parts of Client._getSendData to Message._to_send_data 2019-08-28 18:07:21 +02:00
Mads Marquart
2703d9513a Move Client._client_id to State 2019-08-28 18:07:21 +02:00
Mads Marquart
3dce83de93 Move Client._upload to State 2019-08-28 18:07:21 +02:00
Mads Marquart
ef8e7d4251 Move user id handling to State 2019-08-28 18:07:21 +02:00
Mads Marquart
a131e1ae73 Move body of Client.graphql_requests to State._graphql_requests 2019-08-28 18:07:21 +02:00
Mads Marquart
84a86bd7bd Move body of Client._payload_post to State 2019-08-28 18:07:21 +02:00
Mads Marquart
adfb5886c9 Move body of Client._post to State 2019-08-28 18:07:21 +02:00
Mads Marquart
8d237ea4ef Move body of Client._get to State 2019-08-28 18:07:21 +02:00
Mads Marquart
513bc6eadf Move Client._do_refresh to State 2019-08-28 18:07:21 +02:00
15 changed files with 818 additions and 450 deletions

View File

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.8.0
+current_version = 1.9.7
 commit = True
 tag = True

View File

@@ -67,5 +67,5 @@ print("thread's type: {}".format(thread.type))
 # Print image url for 20 last images from thread.
 images = client.fetchThreadImages("<thread id>")
-for image in islice(image, 20):
+for image in islice(images, 20):
     print(image.large_preview_url)

View File

@@ -13,7 +13,7 @@ from ._client import Client
 from ._util import log  # TODO: Remove this (from examples too)

 __title__ = "fbchat"
-__version__ = "1.8.0"
+__version__ = "1.9.7"
 __description__ = "Facebook Chat (Messenger) for Python"

 __copyright__ = "Copyright 2015 - 2019 by Taehoon Kim"

File diff suppressed because it is too large

View File

@@ -104,6 +104,9 @@ class Group(Thread):
             plan=plan,
         )

+    def _to_send_data(self):
+        return {"thread_fbid": self.uid}
+

 @attr.s(cmp=False, init=False)
 class Room(Group):

View File

@@ -26,7 +26,7 @@ class EmojiSize(Enum):
"s": cls.SMALL, "s": cls.SMALL,
} }
for tag in tags or (): for tag in tags or ():
data = tag.split(":", maxsplit=1) data = tag.split(":", 1)
if len(data) > 1 and data[0] == "hot_emoji_size": if len(data) > 1 and data[0] == "hot_emoji_size":
return string_to_emojisize.get(data[1]) return string_to_emojisize.get(data[1])
return None return None
@@ -151,6 +151,55 @@ class Message(object):
             return False
         return any(map(lambda tag: "forward" in tag or "copy" in tag, tags))

+    def _to_send_data(self):
+        data = {}
+
+        if self.text or self.sticker or self.emoji_size:
+            data["action_type"] = "ma-type:user-generated-message"
+
+        if self.text:
+            data["body"] = self.text
+
+        for i, mention in enumerate(self.mentions):
+            data["profile_xmd[{}][id]".format(i)] = mention.thread_id
+            data["profile_xmd[{}][offset]".format(i)] = mention.offset
+            data["profile_xmd[{}][length]".format(i)] = mention.length
+            data["profile_xmd[{}][type]".format(i)] = "p"
+
+        if self.emoji_size:
+            if self.text:
+                data["tags[0]"] = "hot_emoji_size:" + self.emoji_size.name.lower()
+            else:
+                data["sticker_id"] = self.emoji_size.value
+
+        if self.sticker:
+            data["sticker_id"] = self.sticker.uid
+
+        if self.quick_replies:
+            xmd = {"quick_replies": []}
+            for quick_reply in self.quick_replies:
+                # TODO: Move this to `_quick_reply.py`
+                q = dict()
+                q["content_type"] = quick_reply._type
+                q["payload"] = quick_reply.payload
+                q["external_payload"] = quick_reply.external_payload
+                q["data"] = quick_reply.data
+                if quick_reply.is_response:
+                    q["ignore_for_webhook"] = False
+                if isinstance(quick_reply, _quick_reply.QuickReplyText):
+                    q["title"] = quick_reply.title
+                if not isinstance(quick_reply, _quick_reply.QuickReplyLocation):
+                    q["image_url"] = quick_reply.image_url
+                xmd["quick_replies"].append(q)
+            if len(self.quick_replies) == 1 and self.quick_replies[0].is_response:
+                xmd["quick_replies"] = xmd["quick_replies"][0]
+            data["platform_xmd"] = json.dumps(xmd)
+
+        if self.reply_to_id:
+            data["replied_to_message_id"] = self.reply_to_id
+
+        return data
+
     @classmethod
     def _from_graphql(cls, data):
         if data.get("message_sender") is None:

fbchat/_mqtt.py (new file, +339 lines)
View File

@@ -0,0 +1,339 @@
import attr
import random

import paho.mqtt.client

from ._core import log
from . import _util, _exception, _graphql


def generate_session_id():
    """Generate a random session ID between 1 and 9007199254740991."""
    return random.randint(1, 2 ** 53)


@attr.s(slots=True)
class Mqtt(object):
    _state = attr.ib()
    _mqtt = attr.ib()
    _on_message = attr.ib()
    _chat_on = attr.ib()
    _foreground = attr.ib()
    _sequence_id = attr.ib()
    _sync_token = attr.ib(None)

    _HOST = "edge-chat.facebook.com"

    @classmethod
    def connect(cls, state, on_message, chat_on, foreground):
        mqtt = paho.mqtt.client.Client(
            client_id="mqttwsclient",
            clean_session=True,
            protocol=paho.mqtt.client.MQTTv31,
            transport="websockets",
        )
        mqtt.enable_logger()
        # mqtt.max_inflight_messages_set(20)  # The rest will get queued
        # mqtt.max_queued_messages_set(0)  # Unlimited messages can be queued
        # mqtt.message_retry_set(20)  # Retry sending for at least 20 seconds
        # mqtt.reconnect_delay_set(min_delay=1, max_delay=120)
        # TODO: Is region (lla | atn | odn | others?) important?
        mqtt.tls_set()

        self = cls(
            state=state,
            mqtt=mqtt,
            on_message=on_message,
            chat_on=chat_on,
            foreground=foreground,
            sequence_id=cls._fetch_sequence_id(state),
        )

        # Configure callbacks
        mqtt.on_message = self._on_message_handler
        mqtt.on_connect = self._on_connect_handler

        self._configure_connect_options()

        # Attempt to connect
        try:
            rc = mqtt.connect(self._HOST, 443, keepalive=10)
        except (
            # Taken from .loop_forever
            paho.mqtt.client.socket.error,
            OSError,
            paho.mqtt.client.WebsocketConnectionError,
        ) as e:
            raise _exception.FBchatException("MQTT connection failed")

        # Raise error if connecting failed
        if rc != paho.mqtt.client.MQTT_ERR_SUCCESS:
            err = paho.mqtt.client.error_string(rc)
            raise _exception.FBchatException("MQTT connection failed: {}".format(err))

        return self

    def _on_message_handler(self, client, userdata, message):
        # Parse payload JSON
        try:
            j = _util.parse_json(message.payload.decode("utf-8"))
        except (_exception.FBchatFacebookError, UnicodeDecodeError):
            log.exception("Failed parsing MQTT data on %s as JSON", message.topic)
            return

        log.debug("MQTT payload: %s, %s", message.topic, j)

        if message.topic == "/t_ms":
            # Update sync_token when received
            # This is received in the first message after we've created a messenger
            # sync queue.
            if "syncToken" in j and "firstDeltaSeqId" in j:
                self._sync_token = j["syncToken"]
                self._sequence_id = j["firstDeltaSeqId"]
                return

            # Update last sequence id when received
            if "lastIssuedSeqId" in j:
                self._sequence_id = j["lastIssuedSeqId"]

            if "errorCode" in j:
                error = j["errorCode"]
                # TODO: 'F\xfa\x84\x8c\x85\xf8\xbc-\x88 FB_PAGES_INSUFFICIENT_PERMISSION\x00'
                if error in ("ERROR_QUEUE_NOT_FOUND", "ERROR_QUEUE_OVERFLOW"):
                    # ERROR_QUEUE_NOT_FOUND means that the queue was deleted, since too
                    # much time passed, or that it was simply missing
                    # ERROR_QUEUE_OVERFLOW means that the sequence id was too small, so
                    # the desired events could not be retrieved
                    log.error(
                        "The MQTT listener was disconnected for too long,"
                        " events may have been lost"
                    )
                    self._sync_token = None
                    self._sequence_id = self._fetch_sequence_id(self._state)
                    self._messenger_queue_publish()
                    # TODO: Signal to the user that they should reload their data!
                    return
                log.error("MQTT error code %s received", error)
                return

        # Call the external callback
        self._on_message(message.topic, j)

    @staticmethod
    def _fetch_sequence_id(state):
        """Fetch sequence ID."""
        params = {
            "limit": 1,
            "tags": ["INBOX"],
            "before": None,
            "includeDeliveryReceipts": False,
            "includeSeqID": True,
        }
        log.debug("Fetching MQTT sequence ID")
        # Same request as in `Client.fetchThreadList`
        (j,) = state._graphql_requests(_graphql.from_doc_id("1349387578499440", params))
        sequence_id = j["viewer"]["message_threads"]["sync_sequence_id"]
        if not sequence_id:
            raise _exception.FBchatNotLoggedIn("Failed fetching sequence id")
        return int(sequence_id)

    def _on_connect_handler(self, client, userdata, flags, rc):
        if rc == 21:
            raise _exception.FBchatException(
                "Failed connecting. Maybe your cookies are wrong?"
            )
        if rc != 0:
            return  # Don't try to send publish if the connection failed

        self._messenger_queue_publish()

    def _messenger_queue_publish(self):
        # configure receiving messages.
        payload = {
            "sync_api_version": 10,
            "max_deltas_able_to_process": 1000,
            "delta_batch_size": 500,
            "encoding": "JSON",
            "entity_fbid": self._state.user_id,
        }

        # If we don't have a sync_token, create a new messenger queue
        # This is done so that across reconnects, if we've received a sync token, we
        # SHOULD receive a piece of data in /t_ms exactly once!
        if self._sync_token is None:
            topic = "/messenger_sync_create_queue"
            payload["initial_titan_sequence_id"] = str(self._sequence_id)
            payload["device_params"] = None
        else:
            topic = "/messenger_sync_get_diffs"
            payload["last_seq_id"] = str(self._sequence_id)
            payload["sync_token"] = self._sync_token

        self._mqtt.publish(topic, _util.json_minimal(payload), qos=1)

    def _configure_connect_options(self):
        # Generate a new session ID on each reconnect
        session_id = generate_session_id()

        topics = [
            # Things that happen in chats (e.g. messages)
            "/t_ms",
            # Group typing notifications
            "/thread_typing",
            # Private chat typing notifications
            "/orca_typing_notifications",
            # Active notifications
            "/orca_presence",
            # Other notifications not related to chats (e.g. friend requests)
            "/legacy_web",
            # Facebook's continuous error reporting/logging?
            "/br_sr",
            # Response to /br_sr
            "/sr_res",
            # Data about user-to-user calls
            # TODO: Investigate the response from this! (A bunch of binary data)
            # "/t_rtc",
            # TODO: Find out what this does!
            # TODO: Investigate the response from this! (A bunch of binary data)
            # "/t_p",
            # TODO: Find out what this does!
            "/webrtc",
            # TODO: Find out what this does!
            "/onevc",
            # TODO: Find out what this does!
            "/notify_disconnect",
            # Old, no longer active topics
            # These are here just in case something interesting pops up
            "/inbox",
            "/mercury",
            "/messaging_events",
            "/orca_message_notifications",
            "/pp",
            "/webrtc_response",
        ]

        username = {
            # The user ID
            "u": self._state.user_id,
            # Session ID
            "s": session_id,
            # Active status setting
            "chat_on": self._chat_on,
            # foreground_state - Whether the window is focused
            "fg": self._foreground,
            # Can be any random ID
            "d": self._state._client_id,
            # Application ID, taken from facebook.com
            "aid": 219994525426954,
            # MQTT extension by FB, allows making a SUBSCRIBE while CONNECTing
            "st": topics,
            # MQTT extension by FB, allows making a PUBLISH while CONNECTing
            # Using this is more efficient, but the same can be acheived with:
            #     def on_connect(*args):
            #         mqtt.publish(topic, payload, qos=1)
            #     mqtt.on_connect = on_connect
            # TODO: For some reason this doesn't work!
            "pm": [
                # {
                #     "topic": topic,
                #     "payload": payload,
                #     "qos": 1,
                #     "messageId": 65536,
                # }
            ],
            # Unknown parameters
            "cp": 3,
            "ecp": 10,
            "ct": "websocket",
            "mqtt_sid": "",
            "dc": "",
            "no_auto_fg": True,
            "gas": None,
            "pack": [],
        }

        # TODO: Make this thread safe
        self._mqtt.username_pw_set(_util.json_minimal(username))

        headers = {
            # TODO: Make this access thread safe
            "Cookie": _util.get_cookie_header(
                self._state._session, "https://edge-chat.facebook.com/chat"
            ),
            "User-Agent": self._state._session.headers["User-Agent"],
            "Origin": "https://www.facebook.com",
            "Host": self._HOST,
        }

        self._mqtt.ws_set_options(
            path="/chat?sid={}".format(session_id), headers=headers
        )

    def loop_once(self, on_error=None):
        """Run the listening loop once.

        Returns whether to keep listening or not.
        """
        rc = self._mqtt.loop(timeout=1.0)

        # If disconnect() has been called
        if self._mqtt._state == paho.mqtt.client.mqtt_cs_disconnecting:
            return False  # Stop listening

        if rc != paho.mqtt.client.MQTT_ERR_SUCCESS:
            # If known/expected error
            if rc == paho.mqtt.client.MQTT_ERR_CONN_LOST:
                log.warning("Connection lost, retrying")
            elif rc == paho.mqtt.client.MQTT_ERR_NOMEM:
                # This error is wrongly classified
                # See https://github.com/eclipse/paho.mqtt.python/issues/340
                log.warning("Connection error, retrying")
            elif rc == paho.mqtt.client.MQTT_ERR_CONN_REFUSED:
                raise _exception.FBchatNotLoggedIn("MQTT connection refused")
            else:
                err = paho.mqtt.client.error_string(rc)
                log.error("MQTT Error: %s", err)

                # For backwards compatibility
                if on_error:
                    on_error(_exception.FBchatException("MQTT Error {}".format(err)))

            # Wait before reconnecting
            self._mqtt._reconnect_wait()

            # Try reconnecting
            self._configure_connect_options()
            try:
                self._mqtt.reconnect()
            except (
                # Taken from .loop_forever
                paho.mqtt.client.socket.error,
                OSError,
                paho.mqtt.client.WebsocketConnectionError,
            ) as e:
                log.debug("MQTT reconnection failed: %s", e)

        return True  # Keep listening

    def disconnect(self):
        self._mqtt.disconnect()

    def set_foreground(self, value):
        payload = _util.json_minimal({"foreground": value})
        info = self._mqtt.publish("/foreground_state", payload=payload, qos=1)
        self._foreground = value
        # TODO: We can't wait for this, since the loop is running with .loop_forever()
        # info.wait_for_publish()

    def set_chat_on(self, value):
        # TODO: Is this the right request to make?
        data = {"make_user_available_when_in_foreground": value}
        payload = _util.json_minimal(data)
        info = self._mqtt.publish("/set_client_settings", payload=payload, qos=1)
        self._chat_on = value
        # TODO: We can't wait for this, since the loop is running with .loop_forever()
        # info.wait_for_publish()

    # def send_additional_contacts(self, additional_contacts):
    #     payload = _util.json_minimal({"additional_contacts": additional_contacts})
    #     info = self._mqtt.publish("/send_additional_contacts", payload=payload, qos=1)
    #
    # def browser_close(self):
    #     info = self._mqtt.publish("/browser_close", payload=b"{}", qos=1)

View File

@@ -7,11 +7,19 @@ import re
 import requests
 import random

-from . import _util, _exception
+from . import _graphql, _util, _exception

 FB_DTSG_REGEX = re.compile(r'name="fb_dtsg" value="(.*?)"')


+def get_user_id(session):
+    # TODO: Optimize this `.get_dict()` call!
+    rtn = session.cookies.get_dict().get("c_user")
+    if rtn is None:
+        raise _exception.FBchatException("Could not find user id")
+    return str(rtn)
+
+
 def find_input_fields(html):
     return bs4.BeautifulSoup(html, "html.parser", parse_only=bs4.SoupStrainer("input"))
@@ -19,11 +27,17 @@ def find_input_fields(html):
 def session_factory(user_agent=None):
     session = requests.session()
     session.headers["Referer"] = "https://www.facebook.com"
+    session.headers["Accept"] = "text/html"

     # TODO: Deprecate setting the user agent manually
     session.headers["User-Agent"] = user_agent or random.choice(_util.USER_AGENTS)

     return session


+def client_id_factory():
+    return hex(int(random.random() * 2 ** 31))[2:]
+
+
 def is_home(url):
     parts = _util.urlparse(url)
     # Check the urls `/home.php` and `/`
@@ -91,25 +105,21 @@ def _2fa_helper(session, code, r):
 class State(object):
     """Stores and manages state required for most Facebook requests."""

-    fb_dtsg = attr.ib()
+    user_id = attr.ib()
+    _fb_dtsg = attr.ib()
     _revision = attr.ib()
     _session = attr.ib(factory=session_factory)
     _counter = attr.ib(0)
+    _client_id = attr.ib(factory=client_id_factory)
     _logout_h = attr.ib(None)

-    def get_user_id(self):
-        rtn = self.get_cookies().get("c_user")
-        if rtn is None:
-            return None
-        return str(rtn)
-
     def get_params(self):
         self._counter += 1  # TODO: Make this operation atomic / thread-safe
         return {
             "__a": 1,
             "__req": _util.str_base(self._counter, 36),
             "__rev": self._revision,
-            "fb_dtsg": self.fb_dtsg,
+            "fb_dtsg": self._fb_dtsg,
         }

     @classmethod
@@ -163,6 +173,9 @@ class State(object):
     @classmethod
     def from_session(cls, session):
+        # TODO: Automatically set user_id when the cookie changes in the session
+        user_id = get_user_id(session)
+
         r = session.get(_util.prefix_url("/"))

         soup = find_input_fields(r.text)
@@ -180,7 +193,11 @@ class State(object):
         logout_h = logout_h_element["value"] if logout_h_element else None

         return cls(
-            fb_dtsg=fb_dtsg, revision=revision, session=session, logout_h=logout_h
+            user_id=user_id,
+            fb_dtsg=fb_dtsg,
+            revision=revision,
+            session=session,
+            logout_h=logout_h,
         )

     def get_cookies(self):
@@ -191,3 +208,126 @@ class State(object):
         session = session_factory(user_agent=user_agent)
         session.cookies = requests.cookies.merge_cookies(session.cookies, cookies)
         return cls.from_session(session=session)
+
+    def _do_refresh(self):
+        # TODO: Raise the error instead, and make the user do the refresh manually
+        # It may be a bad idea to do this in an exception handler, if you have a better method, please suggest it!
+        _util.log.warning("Refreshing state and resending request")
+        new = State.from_session(session=self._session)
+        self.user_id = new.user_id
+        self._fb_dtsg = new._fb_dtsg
+        self._revision = new._revision
+        self._counter = new._counter
+        self._logout_h = new._logout_h or self._logout_h
+
+    def _get(self, url, params, error_retries=3):
+        params.update(self.get_params())
+        r = self._session.get(_util.prefix_url(url), params=params)
+        content = _util.check_request(r)
+        j = _util.to_json(content)
+        try:
+            _util.handle_payload_error(j)
+        except _exception.FBchatPleaseRefresh:
+            if error_retries > 0:
+                self._do_refresh()
+                return self._get(url, params, error_retries=error_retries - 1)
+            raise
+        return j
+
+    def _post(self, url, data, files=None, as_graphql=False, error_retries=3):
+        data.update(self.get_params())
+        r = self._session.post(_util.prefix_url(url), data=data, files=files)
+        content = _util.check_request(r)
+        try:
+            if as_graphql:
+                return _graphql.response_to_json(content)
+            else:
+                j = _util.to_json(content)
+                # TODO: Remove this, and move it to _payload_post instead
+                # We can't yet, since errors raised in here need to be caught below
+                _util.handle_payload_error(j)
+                return j
+        except _exception.FBchatPleaseRefresh:
+            if error_retries > 0:
+                self._do_refresh()
+                return self._post(
+                    url,
+                    data,
+                    files=files,
+                    as_graphql=as_graphql,
+                    error_retries=error_retries - 1,
+                )
+            raise
+
+    def _payload_post(self, url, data, files=None):
+        j = self._post(url, data, files=files)
+        try:
+            return j["payload"]
+        except (KeyError, TypeError):
+            raise _exception.FBchatException("Missing payload: {}".format(j))
+
+    def _graphql_requests(self, *queries):
+        data = {
+            "method": "GET",
+            "response_format": "json",
+            "queries": _graphql.queries_to_json(*queries),
+        }
+
+        return self._post("/api/graphqlbatch/", data, as_graphql=True)
+
+    def _upload(self, files, voice_clip=False):
+        """Upload files to Facebook.
+
+        `files` should be a list of files that requests can upload, see
+        `requests.request <https://docs.python-requests.org/en/master/api/#requests.request>`_.
+
+        Return a list of tuples with a file's ID and mimetype.
+        """
+        file_dict = {"upload_{}".format(i): f for i, f in enumerate(files)}
+
+        data = {"voice_clip": voice_clip}
+
+        j = self._payload_post(
+            "https://upload.facebook.com/ajax/mercury/upload.php", data, files=file_dict
+        )
+
+        if len(j["metadata"]) != len(files):
+            raise _exception.FBchatException(
+                "Some files could not be uploaded: {}, {}".format(j, files)
+            )
+
+        return [
+            (data[_util.mimetype_to_key(data["filetype"])], data["filetype"])
+            for data in j["metadata"]
+        ]
+
+    def _do_send_request(self, data):
+        offline_threading_id = _util.generateOfflineThreadingID()
+
+        data["client"] = "mercury"
+        data["author"] = "fbid:{}".format(self.user_id)
+        data["timestamp"] = _util.now()
+        data["source"] = "source:chat:web"
+        data["offline_threading_id"] = offline_threading_id
+        data["message_id"] = offline_threading_id
+        data["threading_id"] = _util.generateMessageID(self._client_id)
+        data["ephemeral_ttl_mode:"] = "0"
+
+        j = self._post("/messaging/send/", data)
+
+        # update JS token if received in response
+        fb_dtsg = _util.get_jsmods_require(j, 2)
+        if fb_dtsg is not None:
+            self._fb_dtsg = fb_dtsg
+
+        try:
+            message_ids = [
+                (action["message_id"], action["thread_fbid"])
+                for action in j["payload"]["actions"]
+                if "message_id" in action
+            ]
+            if len(message_ids) != 1:
+                log.warning("Got multiple message ids' back: {}".format(message_ids))
+            return message_ids[0]
+        except (KeyError, IndexError, TypeError) as e:
+            raise _exception.FBchatException(
+                "Error when sending message: "
+                "No message IDs could be found: {}".format(j)
+            )
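Taken together with the 1.8.x refactor commits above, the send path now composes three private helpers: the thread contributes the routing keys, the message contributes the content, and State._do_send_request signs and posts the combined form data. A rough sketch under those assumptions (private APIs, subject to change; `state` is a logged-in State as in the MQTT sketch above, and the group ID is a placeholder):

from fbchat import Group, Message

group = Group("<group id>")           # only the uid is needed for routing
message = Message(text="Hello group!")

data = group._to_send_data()          # {"thread_fbid": "<group id>"}
data.update(message._to_send_data())  # action_type, body, tags, ...
message_id, thread_id = state._do_send_request(data)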

View File

@@ -16,6 +16,17 @@ class ThreadType(Enum):
     ROOM = 2
     PAGE = 3

+    def _to_class(self):
+        """Convert this enum value to the corresponding class."""
+        from . import _user, _group, _page
+
+        return {
+            ThreadType.USER: _user.User,
+            ThreadType.GROUP: _group.Group,
+            ThreadType.ROOM: _group.Room,
+            ThreadType.PAGE: _page.Page,
+        }[self]
+

 class ThreadLocation(Enum):
     """Used to specify where a thread is located (inbox, pending, archived, other)."""
@@ -130,3 +141,7 @@ class Thread(object):
         else:
             rtn["own_nickname"] = pc[1].get("nickname")
         return rtn
+
+    def _to_send_data(self):
+        # TODO: Only implement this in subclasses
+        return {"other_user_fbid": self.uid}

View File

@@ -192,17 +192,6 @@ class ActiveStatus(object):
     in_game = attr.ib(None)

     @classmethod
-    def _from_chatproxy_presence(cls, id_, data):
-        return cls(
-            active=data["p"] in [2, 3] if "p" in data else None,
-            last_active=data.get("lat"),
-            in_game=int(id_) in data.get("gamers", {}),
-        )
-
-    @classmethod
-    def _from_buddylist_overlay(cls, data, in_game=None):
-        return cls(
-            active=data["a"] in [2, 3] if "a" in data else None,
-            last_active=data.get("la"),
-            in_game=None,
-        )
+    def _from_orca_presence(cls, data):
+        # TODO: Handle `c` and `vc` keys (Probably some binary data)
+        return cls(active=data["p"] in [2, 3], last_active=data.get("l"), in_game=None)
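With the old chatproxy-presence and buddylist-overlay parsers removed, presence updates now arrive on the /orca_presence MQTT topic and go through _from_orca_presence. A sketch of the mapping; the payload shape and user ID here are assumptions inferred from the keys the parser reads ("p" and "l"):

from fbchat import ActiveStatus

payload = {"list": [{"u": 100001234567890, "p": 2, "l": 1578254400}]}
for item in payload["list"]:
    status = ActiveStatus._from_orca_presence(item)
    # status.active is True for p in (2, 3); status.last_active echoes "l"
    print(item["u"], status.active, status.last_active)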

View File

@@ -57,6 +57,11 @@ def now():
     return int(time() * 1000)


+def json_minimal(data):
+    """Get JSON data in minimal form."""
+    return json.dumps(data, separators=(",", ":"))
+
+
 def strip_json_cruft(text):
     """Removes `for(;;);` (and other cruft) that preceeds JSON responses."""
     try:
@@ -65,6 +70,14 @@ def strip_json_cruft(text):
         raise FBchatException("No JSON object found: {!r}".format(text))


+def get_cookie_header(session, url):
+    """Extract a cookie header from a requests session."""
+    # The cookies are extracted this way to make sure they're escaped correctly
+    return requests.cookies.get_cookie_header(
+        session.cookies, requests.Request("GET", url),
+    )
+
+
 def get_decoded_r(r):
     return get_decoded(r._content)
@@ -219,11 +232,12 @@ def get_files_from_urls(file_urls):
         r = requests.get(file_url)
         # We could possibly use r.headers.get('Content-Disposition'), see
         # https://stackoverflow.com/a/37060758
+        file_name = basename(file_url).split("?")[0].split("#")[0]
         files.append(
             (
-                basename(file_url).split("?")[0].split("#")[0],
+                file_name,
                 r.content,
-                r.headers.get("Content-Type") or guess_type(file_url)[0],
+                r.headers.get("Content-Type") or guess_type(file_name)[0],
             )
         )
     return files
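Both new helpers are used by the MQTT layer above; a short sketch of what they do (private functions, the cookie value is a placeholder):

import requests
from fbchat import _util

# json_minimal: compact JSON for MQTT payloads (no whitespace around separators)
assert _util.json_minimal({"foreground": True}) == '{"foreground":true}'

# get_cookie_header: lets requests build (and correctly escape) the Cookie header
session = requests.session()
session.cookies.set("c_user", "100001234567890", domain=".facebook.com")
print(_util.get_cookie_header(session, "https://edge-chat.facebook.com/chat"))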

View File

@@ -17,6 +17,7 @@ requires = [
"attrs>=18.2", "attrs>=18.2",
"requests~=2.19", "requests~=2.19",
"beautifulsoup4~=4.0", "beautifulsoup4~=4.0",
"paho-mqtt~=1.5",
] ]
description-file = "README.rst" description-file = "README.rst"
classifiers = [ classifiers = [

View File

@@ -27,7 +27,7 @@ def test_fetch_threads(client1):
 @pytest.mark.parametrize("emoji, emoji_size", EMOJI_LIST)
 def test_fetch_message_emoji(client, emoji, emoji_size):
     mid = client.sendEmoji(emoji, emoji_size)
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)

     assert subset(
         vars(message), uid=mid, author=client.uid, text=emoji, emoji_size=emoji_size
@@ -46,7 +46,7 @@ def test_fetch_message_info_emoji(client, thread, emoji, emoji_size):
 def test_fetch_message_mentions(client, thread, message_with_mentions):
     mid = client.send(message_with_mentions)
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)

     assert subset(
         vars(message), uid=mid, author=client.uid, text=message_with_mentions.text
@@ -71,7 +71,7 @@ def test_fetch_message_info_mentions(client, thread, message_with_mentions):
 @pytest.mark.parametrize("sticker", STICKER_LIST)
 def test_fetch_message_sticker(client, sticker):
     mid = client.send(Message(sticker=sticker))
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)

     assert subset(vars(message), uid=mid, author=client.uid)
     assert subset(vars(message.sticker), uid=sticker.uid)
@@ -96,6 +96,6 @@ def test_fetch_info(client1, group):
 def test_fetch_image_url(client):
     client.sendLocalFiles([path.join(path.dirname(__file__), "resources", "image.png")])
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)

     assert client.fetchImageUrl(message.attachments[0].uid)

View File

@@ -19,5 +19,5 @@ def test_delete_messages(client):
     mid1 = client.sendMessage(text1)
     mid2 = client.sendMessage(text2)
     client.deleteMessages(mid2)
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)
     assert subset(vars(message), uid=mid1, author=client.uid, text=text1)

View File

@@ -63,7 +63,7 @@ def test_create_poll(client1, group, catch_event, poll_data):
     for recv_option in event[
         "poll"
     ].options:  # The recieved options may not be the full list
-        old_option, = list(filter(lambda o: o.text == recv_option.text, poll.options))
+        (old_option,) = list(filter(lambda o: o.text == recv_option.text, poll.options))
         voters = [client1.uid] if old_option.vote else []

         assert subset(
             vars(recv_option), voters=voters, votes_count=len(voters), vote=False