Clean up imports
@@ -3,6 +3,7 @@
 :copyright: (c) 2015 - 2019 by Taehoon Kim
 :license: BSD 3-Clause, see LICENSE for more details.
 """

 import logging as _logging

 # Set default logging handler to avoid "No handler found" warnings.
@@ -12,7 +13,6 @@ _logging.getLogger(__name__).addHandler(_logging.NullHandler())
 from .models import *

 from ._client import Client
-from ._util import log  # TODO: Remove this (from examples too)

 __title__ = "fbchat"
 __version__ = "1.8.1"
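The two hunks above touch the package __init__: the old re-export of log from ._util is dropped, while the NullHandler setup named in the hunk header stays. For orientation, here is a minimal sketch of that standard library-logging pattern; the "fbchat" logger name follows from __title__, and the application-side lines are illustrative rather than part of the commit.

import logging

# Library side: attach a NullHandler so importing the package never emits
# "No handler found" warnings when the application has not configured logging.
logging.getLogger("fbchat").addHandler(logging.NullHandler())

# Application side: opt in to the library's log output explicitly.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("fbchat").setLevel(logging.DEBUG)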
@@ -1,16 +1,11 @@
-import requests
-import urllib
-from uuid import uuid1
-from random import choice
-from bs4 import BeautifulSoup as bs
-from mimetypes import guess_type
-from collections import OrderedDict
-from ._util import *
-from .models import *
-from . import _graphql
-from ._state import State
 import time
 import json
+import requests
+from collections import OrderedDict
+
+from ._core import log
+from . import _util, _graphql, _state
+from .models import *


 ACONTEXT = {
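This hunk replaces the client module's star import (from ._util import *) and single-name imports (from ._state import State) with module-level imports, so the hunks that follow qualify every helper call as _util...., _graphql...., or _state.State.... A toy two-module sketch of the same style, using hypothetical pkg names rather than fbchat's real layout:

# pkg/_util.py  (hypothetical)
import time

def now():
    # Millisecond timestamp helper, mirroring the shape of fbchat's _util.now.
    return int(time.time() * 1000)


# pkg/_client.py  (hypothetical)
from . import _util  # bind the sibling module, not its individual names

class Client:
    def ping(self):
        # Qualified call sites stay greppable, and _util can grow or rename
        # helpers without this import line ever changing.
        return _util.now()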
@@ -156,7 +151,9 @@ class Client:
         """
         try:
             # Load cookies into current session
-            self._state = State.from_cookies(session_cookies, user_agent=user_agent)
+            self._state = _state.State.from_cookies(
+                session_cookies, user_agent=user_agent
+            )
             self._uid = self._state.user_id
         except Exception as e:
             log.exception("Failed loading session")
@@ -186,7 +183,7 @@ class Client:

         for i in range(1, max_tries + 1):
             try:
-                self._state = State.login(
+                self._state = _state.State.login(
                     email,
                     password,
                     on_2fa_callback=self.on2FACode,
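Both login paths now go through the _state module (_state.State.login and _state.State.from_cookies) instead of an imported State class. From the public API, the cookie path is typically driven as below; this is a hedged sketch against the fbchat 1.x interface as documented, with placeholder credentials and file name.

import json
import fbchat

# First run: full login, then persist the session cookies for reuse.
client = fbchat.Client("<email>", "<password>")
with open("session.json", "w") as f:
    json.dump(client.getSession(), f)

# Later runs: reuse the cookies, which ends up in State.from_cookies above.
with open("session.json") as f:
    cookies = json.load(f)
client = fbchat.Client("<email>", "<password>", session_cookies=cookies)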
@@ -871,7 +868,7 @@ class Client:
         data = {"photo_id": str(image_id)}
         j = self._post("/mercury/attachments/photo/", data)

-        url = get_jsmods_require(j, 3)
+        url = _util.get_jsmods_require(j, 3)
         if url is None:
             raise FBchatException("Could not fetch image URL from: {}".format(j))
         return url
@@ -1218,7 +1215,7 @@ class Client:
         data["has_attachment"] = True

         for i, (file_id, mimetype) in enumerate(files):
-            data["{}s[{}]".format(mimetype_to_key(mimetype), i)] = file_id
+            data["{}s[{}]".format(_util.mimetype_to_key(mimetype), i)] = file_id

         return self._doSendRequest(data)

@@ -1239,8 +1236,8 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        file_urls = require_list(file_urls)
-        files = self._upload(get_files_from_urls(file_urls))
+        file_urls = _util.require_list(file_urls)
+        files = self._upload(_util.get_files_from_urls(file_urls))
         return self._sendFiles(
             files=files, message=message, thread_id=thread_id, thread_type=thread_type
         )
@@ -1262,8 +1259,8 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        file_paths = require_list(file_paths)
-        with get_files_from_paths(file_paths) as x:
+        file_paths = _util.require_list(file_paths)
+        with _util.get_files_from_paths(file_paths) as x:
             files = self._upload(x)
         return self._sendFiles(
             files=files, message=message, thread_id=thread_id, thread_type=thread_type
@@ -1286,8 +1283,8 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        clip_urls = require_list(clip_urls)
-        files = self._upload(get_files_from_urls(clip_urls), voice_clip=True)
+        clip_urls = _util.require_list(clip_urls)
+        files = self._upload(_util.get_files_from_urls(clip_urls), voice_clip=True)
         return self._sendFiles(
             files=files, message=message, thread_id=thread_id, thread_type=thread_type
         )
@@ -1309,8 +1306,8 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        clip_paths = require_list(clip_paths)
-        with get_files_from_paths(clip_paths) as x:
+        clip_paths = _util.require_list(clip_paths)
+        with _util.get_files_from_paths(clip_paths) as x:
             files = self._upload(x, voice_clip=True)
         return self._sendFiles(
             files=files, message=message, thread_id=thread_id, thread_type=thread_type
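The four hunks above only qualify the helper calls (_util.require_list, _util.get_files_from_urls, _util.get_files_from_paths); the public senders keep their signatures. A hedged usage sketch of those senders against the fbchat 1.x API, with placeholder thread ID and file names:

from fbchat import Client
from fbchat.models import Message, ThreadType

client = Client("<email>", "<password>")

# Remote files are fetched via get_files_from_urls, then uploaded and sent.
client.sendRemoteFiles(
    ["https://example.com/picture.png"],
    message=Message(text="A remote file"),
    thread_id="<thread id>",
    thread_type=ThreadType.USER,
)

# Local files go through the get_files_from_paths context manager instead.
client.sendLocalFiles(
    ["photo.jpg"],
    message=Message(text="A local file"),
    thread_id="<thread id>",
    thread_type=ThreadType.GROUP,
)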
@@ -1371,7 +1368,7 @@ class Client:
         thread_id, thread_type = self._getThread(thread_id, None)
         data = {
             "attachment_id": attachment_id,
-            "recipient_map[{}]".format(generateOfflineThreadingID()): thread_id,
+            "recipient_map[{}]".format(_util.generateOfflineThreadingID()): thread_id,
         }
         j = self._payload_post("/mercury/attachments/forward/", data)
         if not j.get("success"):
@@ -1424,7 +1421,7 @@ class Client:
         data["action_type"] = "ma-type:log-message"
         data["log_message_type"] = "log:subscribe"

-        user_ids = require_list(user_ids)
+        user_ids = _util.require_list(user_ids)

         for i, user_id in enumerate(user_ids):
             if user_id == self._uid:
@@ -1458,7 +1455,7 @@ class Client:

         data = {"add": admin, "thread_fbid": thread_id}

-        admin_ids = require_list(admin_ids)
+        admin_ids = _util.require_list(admin_ids)

         for i, admin_id in enumerate(admin_ids):
             data["admin_ids[{}]".format(i)] = str(admin_id)
@@ -1507,7 +1504,7 @@ class Client:
     def _usersApproval(self, user_ids, approve, thread_id=None):
         thread_id, thread_type = self._getThread(thread_id, None)

-        user_ids = list(require_list(user_ids))
+        user_ids = _util.require_list(user_ids)

         data = {
             "client_mutation_id": "0",
@@ -1572,7 +1569,7 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        (image_id, mimetype), = self._upload(get_files_from_urls([image_url]))
+        (image_id, mimetype), = self._upload(_util.get_files_from_urls([image_url]))
         return self._changeGroupImage(image_id, thread_id)

     def changeGroupImageLocal(self, image_path, thread_id=None):
@@ -1585,7 +1582,7 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        with get_files_from_paths([image_path]) as files:
+        with _util.get_files_from_paths([image_path]) as files:
             (image_id, mimetype), = self._upload(files)

         return self._changeGroupImage(image_id, thread_id)
@@ -1700,7 +1697,7 @@ class Client:
         }
         data = {"doc_id": 1491398900900362, "variables": json.dumps({"data": data})}
         j = self._payload_post("/webgraphql/mutation", data)
-        handle_graphql_errors(j)
+        _util.handle_graphql_errors(j)

     def createPlan(self, plan, thread_id=None):
         """Set a plan.
@@ -1889,7 +1886,7 @@ class Client:
         return True

     def _readStatus(self, read, thread_ids):
-        thread_ids = require_list(thread_ids)
+        thread_ids = _util.require_list(thread_ids)

         data = {"watermarkTimestamp": now(), "shouldSendReadReceipt": "true"}

@@ -2001,7 +1998,7 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        thread_ids = require_list(thread_ids)
+        thread_ids = _util.require_list(thread_ids)

         if location == ThreadLocation.PENDING:
             location = ThreadLocation.OTHER
@@ -2037,7 +2034,7 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        thread_ids = require_list(thread_ids)
+        thread_ids = _util.require_list(thread_ids)

         data_unpin = dict()
         data_delete = dict()
@@ -2080,7 +2077,7 @@ class Client:
         Raises:
             FBchatException: If request failed
         """
-        message_ids = require_list(message_ids)
+        message_ids = _util.require_list(message_ids)
         data = dict()
         for i, message_id in enumerate(message_ids):
             data["message_ids[{}]".format(i)] = message_id
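Nearly every hunk in this file routes through _util.require_list. The diff does not show its body; a plausible normalizer of that shape, purely hypothetical and for orientation only (not fbchat's actual implementation), would be:

def require_list(value):
    # Hypothetical: accept either a single ID or an iterable of IDs and
    # always hand back something the call sites above can enumerate.
    if isinstance(value, (list, tuple, set)):
        return list(value)
    return [value]

# e.g. require_list("123") == ["123"], require_list(["1", "2"]) == ["1", "2"]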
@@ -1,13 +1,12 @@
-import re
 import json
-from time import time
-from random import random
-from contextlib import contextmanager
-from mimetypes import guess_type
-from os.path import basename
-from urllib.parse import parse_qs, urlparse
-import warnings
+import time
+import random
+import contextlib
+import mimetypes
+import urllib.parse
 import requests
+from os import path
+
 from ._core import log
 from ._exception import (
     FBchatException,
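The same cleanup is applied to the utility module itself: from time import time, from random import random, from contextlib import contextmanager, and friends become plain module imports, and the unused re and warnings imports are dropped. Besides matching the client module's style, the qualified form keeps the origin of each call obvious. A quick illustration with the module-qualified stdlib calls:

import time

def measure():
    # With `from time import time`, the bare name time is easy to collide
    # with elsewhere in the module; time.time() names its source at every
    # call site and survives such collisions at module level.
    start = time.time()
    total = sum(range(100000))
    return total, time.time() - start

print(measure())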
@@ -29,7 +28,7 @@ USER_AGENTS = [


 def now():
-    return int(time() * 1000)
+    return int(time.time() * 1000)


 def strip_json_cruft(text):
@@ -72,17 +71,17 @@ def str_base(number, base):

 def generateMessageID(client_id=None):
     k = now()
-    l = int(random() * 4294967295)
+    l = int(random.random() * 4294967295)
     return "<{}:{}-{}@mail.projektitan.com>".format(k, l, client_id)


 def getSignatureID():
-    return hex(int(random() * 2147483648))
+    return hex(int(random.random() * 2147483648))


 def generateOfflineThreadingID():
     ret = now()
-    value = int(random() * 4294967295)
+    value = int(random.random() * 4294967295)
     string = ("0000000000000000000000" + format(value, "b"))[-22:]
     msgs = format(ret, "b") + string
     return str(int(msgs, 2))
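generateOfflineThreadingID packs a millisecond timestamp and 22 random bits into one integer: concatenating the binary strings is equivalent to (ret << 22) | (value & 0x3FFFFF), so the timestamp can be recovered by shifting right by 22 bits. A small sketch reproducing the scheme with the module-qualified calls the commit switches to:

import random
import time

def now():
    return int(time.time() * 1000)

def generate_offline_threading_id():
    ret = now()                                    # millisecond timestamp
    value = int(random.random() * 4294967295)      # up to 32 random bits
    string = ("0000000000000000000000" + format(value, "b"))[-22:]  # keep low 22 bits
    return str(int(format(ret, "b") + string, 2))  # == (ret << 22) | low-22-bits

tid = int(generate_offline_threading_id())
print(tid >> 22)   # recovers the embedded millisecond timestamp
print(now())       # within a few milliseconds of the value above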
@@ -196,20 +195,24 @@ def get_files_from_urls(file_urls):
         # https://stackoverflow.com/a/37060758
         files.append(
             (
-                basename(file_url).split("?")[0].split("#")[0],
+                path.basename(file_url).split("?")[0].split("#")[0],
                 r.content,
-                r.headers.get("Content-Type") or guess_type(file_url)[0],
+                r.headers.get("Content-Type") or mimetypes.guess_type(file_url)[0],
             )
         )
     return files


-@contextmanager
+@contextlib.contextmanager
 def get_files_from_paths(filenames):
     files = []
     for filename in filenames:
         files.append(
-            (basename(filename), open(filename, "rb"), guess_type(filename)[0])
+            (
+                path.basename(filename),
+                open(filename, "rb"),
+                mimetypes.guess_type(filename)[0],
+            )
         )
     yield files
     for fn, fp, ft in files:
||||||
@@ -217,7 +220,7 @@ def get_files_from_paths(filenames):
|
|||||||
|
|
||||||
|
|
||||||
def get_url_parameters(url, *args):
|
def get_url_parameters(url, *args):
|
||||||
params = parse_qs(urlparse(url).query)
|
params = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)
|
||||||
return [params[arg][0] for arg in args if params.get(arg)]
|
return [params[arg][0] for arg in args if params.get(arg)]
|
||||||
|
|
||||||
|
|
||||||
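Finally, get_url_parameters now spells out urllib.parse.parse_qs and urllib.parse.urlparse. A quick demonstration of what that pair does, using a made-up URL:

import urllib.parse

url = "https://example.com/photo.php?fbid=1234&set=abc#fragment"
params = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)

print(params)                    # {'fbid': ['1234'], 'set': ['abc']}
print([params[arg][0] for arg in ("fbid", "set") if params.get(arg)])  # ['1234', 'abc']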