Compare commits
46 Commits
Author | SHA1 | Date
---|---|---
 | 881aa9adce | 
 | 4714be5697 | 
 | cb7f4a72d7 | 
 | fb63ff0db8 | 
 | c5f447e20b | 
 | b4d3769fd5 | 
 | b199d597b2 | 
 | debfb37a47 | 
 | 67fd6ffdf6 | 
 | e57265016e | 
 | cf4c22898c | 
 | 3bb99541e7 | 
 | 8c367af0ff | 
 | bc1e3edf17 | 
 | e488f4a7da | 
 | afad38d8e1 | 
 | e9804d4184 | 
 | a1b80a7abb | 
 | 803bfa7084 | 
 | d1cb866b44 | 
 | a298e0cf16 | 
 | 766b0125fb | 
 | 998fa43fb2 | 
 | ecc6edac5a | 
 | ea518ba4c9 | 
 | ffdf4222bf | 
 | a97ef67411 | 
 | 813219cd9c | 
 | bb1f7d9294 | 
 | 3d28c958d3 | 
 | 6b68916d74 | 
 | 12e752e681 | 
 | 1f342d0c71 | 
 | 5e86d4a48a | 
 | 0838f84859 | 
 | abc938eacd | 
 | 4d13cd2c0b | 
 | 8f8971c706 | 
 | 2703d9513a | 
 | 3dce83de93 | 
 | ef8e7d4251 | 
 | a131e1ae73 | 
 | 84a86bd7bd | 
 | adfb5886c9 | 
 | 8d237ea4ef | 
 | 513bc6eadf | 
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.8.0
+current_version = 1.9.3
 commit = True
 tag = True
@@ -13,7 +13,7 @@ from ._client import Client
 from ._util import log  # TODO: Remove this (from examples too)
 
 __title__ = "fbchat"
-__version__ = "1.8.0"
+__version__ = "1.9.3"
 __description__ = "Facebook Chat (Messenger) for Python"
 
 __copyright__ = "Copyright 2015 - 2019 by Taehoon Kim"
File diff suppressed because it is too large
@@ -104,6 +104,9 @@ class Group(Thread):
             plan=plan,
         )
 
+    def _to_send_data(self):
+        return {"thread_fbid": self.uid}
+
 
 @attr.s(cmp=False, init=False)
 class Room(Group):
@@ -26,7 +26,7 @@ class EmojiSize(Enum):
             "s": cls.SMALL,
         }
        for tag in tags or ():
-            data = tag.split(":", maxsplit=1)
+            data = tag.split(":", 1)
            if len(data) > 1 and data[0] == "hot_emoji_size":
                return string_to_emojisize.get(data[1])
        return None
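The change from `tag.split(":", maxsplit=1)` to `tag.split(":", 1)` keeps the behaviour identical; passing the limit positionally is presumably for Python 2 compatibility, where `str.split` does not accept `maxsplit` as a keyword argument. A minimal sketch of the tag parsing (the tag values are made up for illustration):

    >>> "hot_emoji_size:large".split(":", 1)
    ['hot_emoji_size', 'large']
    >>> "no_separator_here".split(":", 1)
    ['no_separator_here']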
@@ -151,6 +151,55 @@ class Message(object):
             return False
         return any(map(lambda tag: "forward" in tag or "copy" in tag, tags))
 
+    def _to_send_data(self):
+        data = {}
+
+        if self.text or self.sticker or self.emoji_size:
+            data["action_type"] = "ma-type:user-generated-message"
+
+        if self.text:
+            data["body"] = self.text
+
+        for i, mention in enumerate(self.mentions):
+            data["profile_xmd[{}][id]".format(i)] = mention.thread_id
+            data["profile_xmd[{}][offset]".format(i)] = mention.offset
+            data["profile_xmd[{}][length]".format(i)] = mention.length
+            data["profile_xmd[{}][type]".format(i)] = "p"
+
+        if self.emoji_size:
+            if self.text:
+                data["tags[0]"] = "hot_emoji_size:" + self.emoji_size.name.lower()
+            else:
+                data["sticker_id"] = self.emoji_size.value
+
+        if self.sticker:
+            data["sticker_id"] = self.sticker.uid
+
+        if self.quick_replies:
+            xmd = {"quick_replies": []}
+            for quick_reply in self.quick_replies:
+                # TODO: Move this to `_quick_reply.py`
+                q = dict()
+                q["content_type"] = quick_reply._type
+                q["payload"] = quick_reply.payload
+                q["external_payload"] = quick_reply.external_payload
+                q["data"] = quick_reply.data
+                if quick_reply.is_response:
+                    q["ignore_for_webhook"] = False
+                if isinstance(quick_reply, _quick_reply.QuickReplyText):
+                    q["title"] = quick_reply.title
+                if not isinstance(quick_reply, _quick_reply.QuickReplyLocation):
+                    q["image_url"] = quick_reply.image_url
+                xmd["quick_replies"].append(q)
+            if len(self.quick_replies) == 1 and self.quick_replies[0].is_response:
+                xmd["quick_replies"] = xmd["quick_replies"][0]
+            data["platform_xmd"] = json.dumps(xmd)
+
+        if self.reply_to_id:
+            data["replied_to_message_id"] = self.reply_to_id
+
+        return data
+
     @classmethod
     def _from_graphql(cls, data):
         if data.get("message_sender") is None:
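For a plain text message with a single mention, the new `_to_send_data` would produce a payload roughly like the following. This is a sketch based only on the code above; the literal values are made up, and the comments note which attribute each entry comes from:

    data = {
        "action_type": "ma-type:user-generated-message",
        "body": "Hi @Abc",
        "profile_xmd[0][id]": "1234",  # mention.thread_id
        "profile_xmd[0][offset]": 3,   # mention.offset
        "profile_xmd[0][length]": 4,   # mention.length
        "profile_xmd[0][type]": "p",
    }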
fbchat/_mqtt.py | 317 (new file)
@@ -0,0 +1,317 @@
import attr
import random
import paho.mqtt.client
from ._core import log
from . import _util, _exception, _graphql


def generate_session_id():
    """Generate a random session ID between 1 and 9007199254740991."""
    return random.randint(1, 2 ** 53)


@attr.s(slots=True)
class Mqtt(object):
    _state = attr.ib()
    _mqtt = attr.ib()
    _on_message = attr.ib()
    _chat_on = attr.ib()
    _foreground = attr.ib()
    _sequence_id = attr.ib()
    _sync_token = attr.ib(None)

    _HOST = "edge-chat.facebook.com"

    @classmethod
    def connect(cls, state, on_message, chat_on, foreground):
        mqtt = paho.mqtt.client.Client(
            client_id="mqttwsclient",
            clean_session=True,
            protocol=paho.mqtt.client.MQTTv31,
            transport="websockets",
        )
        mqtt.enable_logger()
        # mqtt.max_inflight_messages_set(20)  # The rest will get queued
        # mqtt.max_queued_messages_set(0)  # Unlimited messages can be queued
        # mqtt.message_retry_set(20)  # Retry sending for at least 20 seconds
        # mqtt.reconnect_delay_set(min_delay=1, max_delay=120)
        # TODO: Is region (lla | atn | odn | others?) important?
        mqtt.tls_set()

        self = cls(
            state=state,
            mqtt=mqtt,
            on_message=on_message,
            chat_on=chat_on,
            foreground=foreground,
            sequence_id=cls._fetch_sequence_id(state),
        )

        # Configure callbacks
        mqtt.on_message = self._on_message_handler
        mqtt.on_connect = self._on_connect_handler

        self._configure_connect_options()

        # Attempt to connect
        try:
            rc = mqtt.connect(self._HOST, 443, keepalive=10)
        except (
            # Taken from .loop_forever
            paho.mqtt.client.socket.error,
            OSError,
            paho.mqtt.client.WebsocketConnectionError,
        ) as e:
            raise _exception.FBchatException("MQTT connection failed")

        # Raise error if connecting failed
        if rc != paho.mqtt.client.MQTT_ERR_SUCCESS:
            err = paho.mqtt.client.error_string(rc)
            raise _exception.FBchatException("MQTT connection failed: {}".format(err))

        return self

    def _on_message_handler(self, client, userdata, message):
        # Parse payload JSON
        try:
            j = _util.parse_json(message.payload.decode("utf-8"))
        except (_exception.FBchatFacebookError, UnicodeDecodeError):
            log.exception("Failed parsing MQTT data on %s as JSON", message.topic)
            return

        if message.topic == "/t_ms":
            # Update sync_token when received
            # This is received in the first message after we've created a messenger
            # sync queue.
            if "syncToken" in j and "firstDeltaSeqId" in j:
                self._sync_token = j["syncToken"]
                self._sequence_id = j["firstDeltaSeqId"]

            # Update last sequence id when received
            if "lastIssuedSeqId" in j:
                self._sequence_id = j["lastIssuedSeqId"]

            if "errorCode" in j:
                # Known types: ERROR_QUEUE_OVERFLOW | ERROR_QUEUE_NOT_FOUND
                # 'F\xfa\x84\x8c\x85\xf8\xbc-\x88 FB_PAGES_INSUFFICIENT_PERMISSION\x00'
                log.error("MQTT error code %s received", j["errorCode"])
                # TODO: Consider resetting the sync_token and sequence ID here?

        log.debug("MQTT payload: %s, %s", message.topic, j)

        # Call the external callback
        self._on_message(message.topic, j)

    @staticmethod
    def _fetch_sequence_id(state):
        """Fetch sequence ID."""
        params = {
            "limit": 1,
            "tags": ["INBOX"],
            "before": None,
            "includeDeliveryReceipts": False,
            "includeSeqID": True,
        }
        log.debug("Fetching MQTT sequence ID")
        # Same request as in `Client.fetchThreadList`
        (j,) = state._graphql_requests(_graphql.from_doc_id("1349387578499440", params))
        try:
            return int(j["viewer"]["message_threads"]["sync_sequence_id"])
        except (KeyError, ValueError):
            # TODO: Proper exceptions
            raise

    def _on_connect_handler(self, client, userdata, flags, rc):
        if rc == 21:
            raise _exception.FBchatException(
                "Failed connecting. Maybe your cookies are wrong?"
            )
        if rc != 0:
            return  # Don't try to send publish if the connection failed

        # configure receiving messages.
        payload = {
            "sync_api_version": 10,
            "max_deltas_able_to_process": 1000,
            "delta_batch_size": 500,
            "encoding": "JSON",
            "entity_fbid": self._state.user_id,
        }

        # If we don't have a sync_token, create a new messenger queue
        # This is done so that across reconnects, if we've received a sync token, we
        # SHOULD receive a piece of data in /t_ms exactly once!
        if self._sync_token is None:
            topic = "/messenger_sync_create_queue"
            payload["initial_titan_sequence_id"] = str(self._sequence_id)
            payload["device_params"] = None
        else:
            topic = "/messenger_sync_get_diffs"
            payload["last_seq_id"] = str(self._sequence_id)
            payload["sync_token"] = self._sync_token

        self._mqtt.publish(topic, _util.json_minimal(payload), qos=1)

    def _configure_connect_options(self):
        # Generate a new session ID on each reconnect
        session_id = generate_session_id()

        topics = [
            # Things that happen in chats (e.g. messages)
            "/t_ms",
            # Group typing notifications
            "/thread_typing",
            # Private chat typing notifications
            "/orca_typing_notifications",
            # Active notifications
            "/orca_presence",
            # Other notifications not related to chats (e.g. friend requests)
            "/legacy_web",
            # Facebook's continuous error reporting/logging?
            "/br_sr",
            # Response to /br_sr
            "/sr_res",
            # TODO: Investigate the response from this! (A bunch of binary data)
            # "/t_p",
            # TODO: Find out what this does!
            "/webrtc",
            # TODO: Find out what this does!
            "/onevc",
            # TODO: Find out what this does!
            "/notify_disconnect",
            # Old, no longer active topics
            # These are here just in case something interesting pops up
            "/inbox",
            "/mercury",
            "/messaging_events",
            "/orca_message_notifications",
            "/pp",
            "/t_rtc",
            "/webrtc_response",
        ]

        username = {
            # The user ID
            "u": self._state.user_id,
            # Session ID
            "s": session_id,
            # Active status setting
            "chat_on": self._chat_on,
            # foreground_state - Whether the window is focused
            "fg": self._foreground,
            # Can be any random ID
            "d": self._state._client_id,
            # Application ID, taken from facebook.com
            "aid": 219994525426954,
            # MQTT extension by FB, allows making a SUBSCRIBE while CONNECTing
            "st": topics,
            # MQTT extension by FB, allows making a PUBLISH while CONNECTing
            # Using this is more efficient, but the same can be acheived with:
            #     def on_connect(*args):
            #         mqtt.publish(topic, payload, qos=1)
            #     mqtt.on_connect = on_connect
            # TODO: For some reason this doesn't work!
            "pm": [
                # {
                #     "topic": topic,
                #     "payload": payload,
                #     "qos": 1,
                #     "messageId": 65536,
                # }
            ],
            # Unknown parameters
            "cp": 3,
            "ecp": 10,
            "ct": "websocket",
            "mqtt_sid": "",
            "dc": "",
            "no_auto_fg": True,
            "gas": None,
            "pack": [],
        }

        # TODO: Make this thread safe
        self._mqtt.username_pw_set(_util.json_minimal(username))

        headers = {
            # TODO: Make this access thread safe
            "Cookie": _util.get_cookie_header(
                self._state._session, "https://edge-chat.facebook.com/chat"
            ),
            "User-Agent": self._state._session.headers["User-Agent"],
            "Origin": "https://www.facebook.com",
            "Host": self._HOST,
        }

        self._mqtt.ws_set_options(
            path="/chat?sid={}".format(session_id), headers=headers
        )

    def loop_once(self, on_error=None):
        """Run the listening loop once.

        Returns whether to keep listening or not.
        """
        rc = self._mqtt.loop(timeout=1.0)

        # If disconnect() has been called
        if self._mqtt._state == paho.mqtt.client.mqtt_cs_disconnecting:
            return False  # Stop listening

        if rc != paho.mqtt.client.MQTT_ERR_SUCCESS:
            # If known/expected error
            if rc == paho.mqtt.client.MQTT_ERR_CONN_LOST:
                log.warning("Connection lost, retrying")
            elif rc == paho.mqtt.client.MQTT_ERR_NOMEM:
                # This error is wrongly classified
                # See https://github.com/eclipse/paho.mqtt.python/issues/340
                log.warning("Connection error, retrying")
            else:
                err = paho.mqtt.client.error_string(rc)
                log.error("MQTT Error: %s", err)
                # For backwards compatibility
                if on_error:
                    on_error(_exception.FBchatException("MQTT Error {}".format(err)))

            # Wait before reconnecting
            self._mqtt._reconnect_wait()

            # Try reconnecting
            self._configure_connect_options()
            try:
                self._mqtt.reconnect()
            except (
                # Taken from .loop_forever
                paho.mqtt.client.socket.error,
                OSError,
                paho.mqtt.client.WebsocketConnectionError,
            ) as e:
                log.debug("MQTT reconnection failed: %s", e)

        return True  # Keep listening

    def disconnect(self):
        self._mqtt.disconnect()

    def set_foreground(self, value):
        payload = _util.json_minimal({"foreground": value})
        info = self._mqtt.publish("/foreground_state", payload=payload, qos=1)
        self._foreground = value
        # TODO: We can't wait for this, since the loop is running with .loop_forever()
        # info.wait_for_publish()

    def set_chat_on(self, value):
        # TODO: Is this the right request to make?
        data = {"make_user_available_when_in_foreground": value}
        payload = _util.json_minimal(data)
        info = self._mqtt.publish("/set_client_settings", payload=payload, qos=1)
        self._chat_on = value
        # TODO: We can't wait for this, since the loop is running with .loop_forever()
        # info.wait_for_publish()

    # def send_additional_contacts(self, additional_contacts):
    #     payload = _util.json_minimal({"additional_contacts": additional_contacts})
    #     info = self._mqtt.publish("/send_additional_contacts", payload=payload, qos=1)
    #
    # def browser_close(self):
    #     info = self._mqtt.publish("/browser_close", payload=b"{}", qos=1)
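A rough sketch of how this new class appears to be driven, inferred only from `connect()`, `loop_once()` and `disconnect()` above. `state` stands in for a logged-in `State` instance and is not constructed here; the callback is a placeholder:

    def on_message(topic, data):
        # A real client would dispatch events here
        print(topic, data)

    mqtt = Mqtt.connect(state, on_message, chat_on=True, foreground=True)
    try:
        # loop_once() returns False once disconnect() has been called
        while mqtt.loop_once():
            pass
    finally:
        mqtt.disconnect()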
fbchat/_state.py | 158
@@ -7,11 +7,19 @@ import re
 import requests
+import random
 
-from . import _util, _exception
+from . import _graphql, _util, _exception
 
 FB_DTSG_REGEX = re.compile(r'name="fb_dtsg" value="(.*?)"')
 
 
+def get_user_id(session):
+    # TODO: Optimize this `.get_dict()` call!
+    rtn = session.cookies.get_dict().get("c_user")
+    if rtn is None:
+        raise _exception.FBchatException("Could not find user id")
+    return str(rtn)
+
+
 def find_input_fields(html):
     return bs4.BeautifulSoup(html, "html.parser", parse_only=bs4.SoupStrainer("input"))
@@ -24,6 +32,10 @@ def session_factory(user_agent=None):
     return session
 
 
+def client_id_factory():
+    return hex(int(random.random() * 2 ** 31))[2:]
+
+
 def is_home(url):
     parts = _util.urlparse(url)
     # Check the urls `/home.php` and `/`
@@ -91,25 +103,21 @@ def _2fa_helper(session, code, r):
 class State(object):
     """Stores and manages state required for most Facebook requests."""
 
-    fb_dtsg = attr.ib()
+    user_id = attr.ib()
+    _fb_dtsg = attr.ib()
     _revision = attr.ib()
     _session = attr.ib(factory=session_factory)
     _counter = attr.ib(0)
+    _client_id = attr.ib(factory=client_id_factory)
     _logout_h = attr.ib(None)
 
-    def get_user_id(self):
-        rtn = self.get_cookies().get("c_user")
-        if rtn is None:
-            return None
-        return str(rtn)
-
     def get_params(self):
         self._counter += 1  # TODO: Make this operation atomic / thread-safe
         return {
             "__a": 1,
             "__req": _util.str_base(self._counter, 36),
             "__rev": self._revision,
-            "fb_dtsg": self.fb_dtsg,
+            "fb_dtsg": self._fb_dtsg,
         }
 
     @classmethod
@@ -163,6 +171,9 @@ class State(object):
 
     @classmethod
     def from_session(cls, session):
+        # TODO: Automatically set user_id when the cookie changes in the session
+        user_id = get_user_id(session)
+
         r = session.get(_util.prefix_url("/"))
 
         soup = find_input_fields(r.text)
@@ -180,7 +191,11 @@ class State(object):
         logout_h = logout_h_element["value"] if logout_h_element else None
 
         return cls(
-            fb_dtsg=fb_dtsg, revision=revision, session=session, logout_h=logout_h
+            user_id=user_id,
+            fb_dtsg=fb_dtsg,
+            revision=revision,
+            session=session,
+            logout_h=logout_h,
         )
 
     def get_cookies(self):
@@ -191,3 +206,126 @@
         session = session_factory(user_agent=user_agent)
         session.cookies = requests.cookies.merge_cookies(session.cookies, cookies)
         return cls.from_session(session=session)
+
+    def _do_refresh(self):
+        # TODO: Raise the error instead, and make the user do the refresh manually
+        # It may be a bad idea to do this in an exception handler, if you have a better method, please suggest it!
+        _util.log.warning("Refreshing state and resending request")
+        new = State.from_session(session=self._session)
+        self.user_id = new.user_id
+        self._fb_dtsg = new._fb_dtsg
+        self._revision = new._revision
+        self._counter = new._counter
+        self._logout_h = new._logout_h or self._logout_h
+
+    def _get(self, url, params, error_retries=3):
+        params.update(self.get_params())
+        r = self._session.get(_util.prefix_url(url), params=params)
+        content = _util.check_request(r)
+        j = _util.to_json(content)
+        try:
+            _util.handle_payload_error(j)
+        except _exception.FBchatPleaseRefresh:
+            if error_retries > 0:
+                self._do_refresh()
+                return self._get(url, params, error_retries=error_retries - 1)
+            raise
+        return j
+
+    def _post(self, url, data, files=None, as_graphql=False, error_retries=3):
+        data.update(self.get_params())
+        r = self._session.post(_util.prefix_url(url), data=data, files=files)
+        content = _util.check_request(r)
+        try:
+            if as_graphql:
+                return _graphql.response_to_json(content)
+            else:
+                j = _util.to_json(content)
+                # TODO: Remove this, and move it to _payload_post instead
+                # We can't yet, since errors raised in here need to be caught below
+                _util.handle_payload_error(j)
+                return j
+        except _exception.FBchatPleaseRefresh:
+            if error_retries > 0:
+                self._do_refresh()
+                return self._post(
+                    url,
+                    data,
+                    files=files,
+                    as_graphql=as_graphql,
+                    error_retries=error_retries - 1,
+                )
+            raise
+
+    def _payload_post(self, url, data, files=None):
+        j = self._post(url, data, files=files)
+        try:
+            return j["payload"]
+        except (KeyError, TypeError):
+            raise _exception.FBchatException("Missing payload: {}".format(j))
+
+    def _graphql_requests(self, *queries):
+        data = {
+            "method": "GET",
+            "response_format": "json",
+            "queries": _graphql.queries_to_json(*queries),
+        }
+        return self._post("/api/graphqlbatch/", data, as_graphql=True)
+
+    def _upload(self, files, voice_clip=False):
+        """Upload files to Facebook.
+
+        `files` should be a list of files that requests can upload, see
+        `requests.request <https://docs.python-requests.org/en/master/api/#requests.request>`_.
+
+        Return a list of tuples with a file's ID and mimetype.
+        """
+        file_dict = {"upload_{}".format(i): f for i, f in enumerate(files)}
+
+        data = {"voice_clip": voice_clip}
+
+        j = self._payload_post(
+            "https://upload.facebook.com/ajax/mercury/upload.php", data, files=file_dict
+        )
+
+        if len(j["metadata"]) != len(files):
+            raise _exception.FBchatException(
+                "Some files could not be uploaded: {}, {}".format(j, files)
+            )
+
+        return [
+            (data[_util.mimetype_to_key(data["filetype"])], data["filetype"])
+            for data in j["metadata"]
+        ]
+
+    def _do_send_request(self, data):
+        offline_threading_id = _util.generateOfflineThreadingID()
+        data["client"] = "mercury"
+        data["author"] = "fbid:{}".format(self.user_id)
+        data["timestamp"] = _util.now()
+        data["source"] = "source:chat:web"
+        data["offline_threading_id"] = offline_threading_id
+        data["message_id"] = offline_threading_id
+        data["threading_id"] = _util.generateMessageID(self._client_id)
+        data["ephemeral_ttl_mode:"] = "0"
+        j = self._post("/messaging/send/", data)
+
+        # update JS token if received in response
+        fb_dtsg = _util.get_jsmods_require(j, 2)
+        if fb_dtsg is not None:
+            self._fb_dtsg = fb_dtsg
+
+        try:
+            message_ids = [
+                (action["message_id"], action["thread_fbid"])
+                for action in j["payload"]["actions"]
+                if "message_id" in action
+            ]
+            if len(message_ids) != 1:
+                log.warning("Got multiple message ids' back: {}".format(message_ids))
+            return message_ids[0]
+        except (KeyError, IndexError, TypeError) as e:
+            raise _exception.FBchatException(
+                "Error when sending message: "
+                "No message IDs could be found: {}".format(j)
+            )
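The new `_get` and `_post` helpers share one recovery pattern: when Facebook answers with a "please refresh" payload error, the state is rebuilt via `_do_refresh()` and the request is retried with a decremented `error_retries` budget. A standalone sketch of that control flow, with stand-in names rather than fbchat APIs:

    class PleaseRefreshError(Exception):
        """Stand-in for fbchat's _exception.FBchatPleaseRefresh."""

    def fetch_with_refresh(request, refresh, error_retries=3):
        # request() performs the HTTP call; refresh() rebuilds the session state.
        try:
            return request()
        except PleaseRefreshError:
            if error_retries > 0:
                refresh()
                return fetch_with_refresh(request, refresh, error_retries - 1)
            raise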
@@ -16,6 +16,17 @@ class ThreadType(Enum):
     ROOM = 2
     PAGE = 3
 
+    def _to_class(self):
+        """Convert this enum value to the corresponding class."""
+        from . import _user, _group, _page
+
+        return {
+            ThreadType.USER: _user.User,
+            ThreadType.GROUP: _group.Group,
+            ThreadType.ROOM: _group.Room,
+            ThreadType.PAGE: _page.Page,
+        }[self]
+
 
 class ThreadLocation(Enum):
     """Used to specify where a thread is located (inbox, pending, archived, other)."""
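The new helper maps each enum member to its model class, using a local import presumably to avoid a circular import at module load time. Roughly, given the members shown above:

    >>> ThreadType.GROUP._to_class()
    <class 'fbchat._group.Group'>
    >>> ThreadType.USER._to_class()
    <class 'fbchat._user.User'>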
@@ -130,3 +141,7 @@ class Thread(object):
         else:
             rtn["own_nickname"] = pc[1].get("nickname")
         return rtn
+
+    def _to_send_data(self):
+        # TODO: Only implement this in subclasses
+        return {"other_user_fbid": self.uid}
@@ -192,17 +192,6 @@ class ActiveStatus(object):
     in_game = attr.ib(None)
 
     @classmethod
-    def _from_chatproxy_presence(cls, id_, data):
-        return cls(
-            active=data["p"] in [2, 3] if "p" in data else None,
-            last_active=data.get("lat"),
-            in_game=int(id_) in data.get("gamers", {}),
-        )
-
-    @classmethod
-    def _from_buddylist_overlay(cls, data, in_game=None):
-        return cls(
-            active=data["a"] in [2, 3] if "a" in data else None,
-            last_active=data.get("la"),
-            in_game=None,
-        )
+    def _from_orca_presence(cls, data):
+        # TODO: Handle `c` and `vc` keys (Probably some binary data)
+        return cls(active=data["p"] in [2, 3], last_active=data.get("l"), in_game=None)
@@ -57,6 +57,11 @@ def now():
     return int(time() * 1000)
 
 
+def json_minimal(data):
+    """Get JSON data in minimal form."""
+    return json.dumps(data, separators=(",", ":"))
+
+
 def strip_json_cruft(text):
     """Removes `for(;;);` (and other cruft) that preceeds JSON responses."""
     try:
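`json_minimal` simply drops the whitespace the default separators would insert, which matters here because the result is packed into the MQTT username and publish payloads. For example:

    >>> import json
    >>> json.dumps({"foreground": True, "a": [1, 2]}, separators=(",", ":"))
    '{"foreground":true,"a":[1,2]}'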
@@ -65,6 +70,14 @@ def strip_json_cruft(text):
        raise FBchatException("No JSON object found: {!r}".format(text))
 
 
+def get_cookie_header(session, url):
+    """Extract a cookie header from a requests session."""
+    # The cookies are extracted this way to make sure they're escaped correctly
+    return requests.cookies.get_cookie_header(
+        session.cookies, requests.Request("GET", url),
+    )
+
+
 def get_decoded_r(r):
     return get_decoded(r._content)
@@ -219,11 +232,12 @@ def get_files_from_urls(file_urls):
         r = requests.get(file_url)
         # We could possibly use r.headers.get('Content-Disposition'), see
         # https://stackoverflow.com/a/37060758
+        file_name = basename(file_url).split("?")[0].split("#")[0]
         files.append(
             (
-                basename(file_url).split("?")[0].split("#")[0],
+                file_name,
                 r.content,
-                r.headers.get("Content-Type") or guess_type(file_url)[0],
+                r.headers.get("Content-Type") or guess_type(file_name)[0],
             )
         )
     return files
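The refactor names the stripped-down filename once and reuses it, so the mimetype fallback now guesses from the bare filename rather than the full URL with its query string. A quick illustration (the URL is made up):

    >>> from os.path import basename
    >>> from mimetypes import guess_type
    >>> file_name = basename("https://example.com/a/image.png?size=large#frag").split("?")[0].split("#")[0]
    >>> file_name
    'image.png'
    >>> guess_type(file_name)[0]
    'image/png'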
@@ -17,6 +17,7 @@ requires = [
     "attrs>=18.2",
     "requests~=2.19",
     "beautifulsoup4~=4.0",
+    "paho-mqtt~=1.5",
 ]
 description-file = "README.rst"
 classifiers = [
@@ -27,7 +27,7 @@ def test_fetch_threads(client1):
 @pytest.mark.parametrize("emoji, emoji_size", EMOJI_LIST)
 def test_fetch_message_emoji(client, emoji, emoji_size):
     mid = client.sendEmoji(emoji, emoji_size)
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)
 
     assert subset(
         vars(message), uid=mid, author=client.uid, text=emoji, emoji_size=emoji_size
@@ -46,7 +46,7 @@ def test_fetch_message_info_emoji(client, thread, emoji, emoji_size):
 
 def test_fetch_message_mentions(client, thread, message_with_mentions):
     mid = client.send(message_with_mentions)
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)
 
     assert subset(
         vars(message), uid=mid, author=client.uid, text=message_with_mentions.text
@@ -71,7 +71,7 @@ def test_fetch_message_info_mentions(client, thread, message_with_mentions):
 @pytest.mark.parametrize("sticker", STICKER_LIST)
 def test_fetch_message_sticker(client, sticker):
     mid = client.send(Message(sticker=sticker))
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)
 
     assert subset(vars(message), uid=mid, author=client.uid)
     assert subset(vars(message.sticker), uid=sticker.uid)
@@ -96,6 +96,6 @@ def test_fetch_info(client1, group):
 
 def test_fetch_image_url(client):
     client.sendLocalFiles([path.join(path.dirname(__file__), "resources", "image.png")])
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)
 
     assert client.fetchImageUrl(message.attachments[0].uid)
@@ -19,5 +19,5 @@ def test_delete_messages(client):
     mid1 = client.sendMessage(text1)
     mid2 = client.sendMessage(text2)
     client.deleteMessages(mid2)
-    message, = client.fetchThreadMessages(limit=1)
+    (message,) = client.fetchThreadMessages(limit=1)
     assert subset(vars(message), uid=mid1, author=client.uid, text=text1)
@@ -63,7 +63,7 @@ def test_create_poll(client1, group, catch_event, poll_data):
     for recv_option in event[
         "poll"
     ].options:  # The recieved options may not be the full list
-        old_option, = list(filter(lambda o: o.text == recv_option.text, poll.options))
+        (old_option,) = list(filter(lambda o: o.text == recv_option.text, poll.options))
         voters = [client1.uid] if old_option.vote else []
         assert subset(
             vars(recv_option), voters=voters, votes_count=len(voters), vote=False
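Both spellings in the test changes unpack a single-element sequence; the parenthesised form is only a readability change and appears to match what newer versions of the Black formatter emit:

    >>> (message,) = ["only-item"]
    >>> message
    'only-item'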