Add OpenAI conversational model

Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
2023-03-29 01:27:43 +03:00
parent ccef6617bb
commit b3bfb60ef9
10 changed files with 244 additions and 123 deletions


@@ -0,0 +1,94 @@
import fbchat
import openai
import tiktoken
from util.database import database
from util.logger import logger
from util.session import client
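
# Estimate the token footprint of a message using the gpt-3.5-turbo tokenizer,
# plus a fixed allowance for per-message metadata overhead.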
def count_tokens(message):
encoding = tiktoken.encoding_for_model("gpt-3.5-turbo")
return len(encoding.encode(message)) + 20
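
# Screen a message with the OpenAI moderation endpoint; returns whether it is
# safe together with the highest category score.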
def check_safe(message):
response = openai.Moderation.create(
input=message,
)
    result = response['results'][0]
    return not result['flagged'], max(result['category_scores'].values())
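
# Build the prompt for the chat completion call: a fixed system persona
# followed by as many of the most recent thread messages as fit in the budget.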
def generate_prompt(event: fbchat.MessageEvent):
total_tokens = 0
system_prompt = [{
"role": "system",
"content": "You are now Admina, a chill e-girl AI. Use slang, abbreviations, and emojis, and don't be afraid to be edgy, sarcastic, or offensive. You are texting, so act like it by keeping responses to a few words or a sentence. Stay in character at all times."
}]
total_tokens += count_tokens(system_prompt[0]["content"])
messages = database.threads.aggregate([
{"$match": {"_id": event.thread.id}},
{"$project": {"messages": {"$filter": {"input": "$messages",
"as": "msg", "cond": {"$ne": ["$$msg.role", None]}}}}}
]).next()["messages"]
chat_prompt = []
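    # Walk the stored messages from newest to oldest, stopping once the running
    # token count exceeds the budget.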
while messages:
message = messages.pop()
author = client.fetch_thread_info([message["author"]])[0].name
total_tokens += count_tokens(message["text"])
if total_tokens > 2000:
break
chat_prompt.append({
"role": message["role"],
"content": f"[{author}]: {message['text']}",
})
    if not chat_prompt:
return None
return system_prompt + chat_prompt[::-1]
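
# Generate and send an AI reply to an incoming message in an activated thread.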
def handle_conversation(event: fbchat.MessageEvent):
event.thread.start_typing()
logger.info(
f"Received conversation message from {event.message.author} in {event.thread.id}"
)
prompt = generate_prompt(event)
if not prompt:
return event.thread.send_text("#> No prompt was generated. Perhaps your message was too long?")
logger.info(
f"Generated prompt for {event.message.author} in {event.thread.id}: {prompt}"
)
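    # Request a completion; mild presence/frequency penalties discourage the
    # model from repeating itself.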
response = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
messages=prompt,
presence_penalty=0.25,
frequency_penalty=0.25,
)['choices'][0]['message']['content']
if not response.startswith("#> "):
response = "#> " + response
logger.info(
f"Generated response for {event.message.author} in {event.thread.id}: {response}"
)
event.thread.stop_typing()
    return event.thread.send_text(response, reply_to_id=event.message.id)

src/handlers/thread.py Normal file

@@ -0,0 +1,21 @@
import fbchat
from util.database import database
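
# Upsert the thread document so Admina starts handling messages in it.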
def activate_thread(event: fbchat.MessageEvent):
thread_db = database.threads.update_one(
{"_id": event.thread.id}, {"$setOnInsert": {
"type": "group" if isinstance(event.thread, fbchat.Group) else "user" if isinstance(event.thread, fbchat.User) else "other",
"messages": []
}}, upsert=True)
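    # TTL index: stored messages expire 15 minutes (900 seconds) after creation.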
database.threads.create_index(
"messages.created_at", expireAfterSeconds=900)
event.thread.send_text("> Admina activated in thread", reply_to_id=event.message.id)
return thread_db
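
# Drop the thread document so Admina stops responding in it.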
def deactivate_thread(event: fbchat.MessageEvent):
event.thread.send_text("> Admina deactivated in thread")
return database.threads.delete_one({"_id": event.thread.id})