From 1d4209dc0f25f9c64f1e286394c698811a2bcb39 Mon Sep 17 00:00:00 2001 From: Paillat Date: Sat, 19 Aug 2023 15:30:57 +0200 Subject: [PATCH] =?UTF-8?q?=F0=9F=94=A7=20chore(requirements.txt):=20comme?= =?UTF-8?q?nt=20out=20py-cord=20and=20add=20pycord=20from=20GitHub=20repos?= =?UTF-8?q?itory=20to=20use=20the=20latest=20version=20=F0=9F=94=A7=20chor?= =?UTF-8?q?e(ChatProcess.py):=20import=20fetch=5Fmessages=5Fhistory=20func?= =?UTF-8?q?tion=20from=20Chat=20module=20to=20use=20it=20in=20Chat=20class?= =?UTF-8?q?=20=F0=9F=94=A7=20chore(ChatProcess.py):=20import=20moderate=20?= =?UTF-8?q?and=20ModerationError=20from=20utils.misc=20module=20to=20use?= =?UTF-8?q?=20them=20in=20Chat=20class=20=F0=9F=94=A7=20chore(Chat.py):=20?= =?UTF-8?q?add=20fetch=5Fmessages=5Fhistory=20function=20to=20fetch=20mess?= =?UTF-8?q?age=20history=20from=20a=20channel=20=F0=9F=94=A7=20chore(Chat.?= =?UTF-8?q?py):=20add=20formatContext=20function=20to=20format=20the=20con?= =?UTF-8?q?text=20for=20the=20bot=20to=20use=20=F0=9F=94=A7=20chore(Chat.p?= =?UTF-8?q?y):=20raise=20an=20exception=20if=20no=20openai=20api=20key=20i?= =?UTF-8?q?s=20set=20=F0=9F=94=A7=20chore(Chat.py):=20add=20logic=20to=20f?= =?UTF-8?q?ilter=20and=20format=20messages=20for=20the=20context=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(Chat.py):=20fix=20typo=20in=20the=20import?= =?UTF-8?q?=20statement=20for=20ModerationError=20=F0=9F=94=A7=20chore(Cha?= =?UTF-8?q?t.py):=20fix=20typo=20in=20the=20import=20statement=20for=20mod?= =?UTF-8?q?erate=20=F0=9F=94=A7=20chore(Chat.py):=20fix=20typo=20in=20the?= =?UTF-8?q?=20import=20statement=20for=20fetch=5Fmessages=5Fhistory=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(prompts.py):=20create=20prompts=20dictiona?= =?UTF-8?q?ry=20and=20read=20chat=20and=20text=20prompts=20from=20files=20?= =?UTF-8?q?for=20each=20character=20=F0=9F=94=A7=20chore(prompts.py):=20cr?= =?UTF-8?q?eate=20createPrompt=20function=20to=20create=20a=20prompt=20fro?= 
=?UTF-8?q?m=20the=20messages=20list=20=F0=9F=94=A7=20chore(prompts.py):?= =?UTF-8?q?=20create=20createTextPrompt=20function=20to=20create=20a=20tex?= =?UTF-8?q?t=20prompt=20from=20the=20messages=20list=20=F0=9F=94=A7=20chor?= =?UTF-8?q?e(prompts.py):=20create=20createChatPrompt=20function=20to=20cr?= =?UTF-8?q?eate=20a=20chat=20prompt=20from=20the=20messages=20list=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(requesters/llama.py):=20create=20llama=20f?= =?UTF-8?q?unction=20as=20a=20placeholder=20=F0=9F=94=A7=20chore(requester?= =?UTF-8?q?s/llama2.py):=20create=20llama2=20function=20as=20a=20placehold?= =?UTF-8?q?er=20=F0=9F=94=A7=20chore(requesters/openaiChat.py):=20import?= =?UTF-8?q?=20openai=5Fcaller=20from=20utils.openaicaller=20module=20?= =?UTF-8?q?=F0=9F=94=A7=20chore(requesters/openaiChat.py):=20create=20open?= =?UTF-8?q?aiChat=20function=20as=20a=20placeholder=20=F0=9F=94=A7=20chore?= =?UTF-8?q?(requesters/openaiText.py):=20create=20openaiText=20function=20?= =?UTF-8?q?as=20a=20placeholder=20=F0=9F=94=A7=20chore(requesters/request.?= =?UTF-8?q?py):=20import=20openaiChat,=20openaiText,=20llama,=20and=20llam?= =?UTF-8?q?a2=20functions=20from=20respective=20modules=20=F0=9F=94=A7=20c?= =?UTF-8?q?hore(requesters/request.py):=20create=20request=20function=20to?= =?UTF-8?q?=20handle=20different=20models=20and=20make=20requests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- requirements.txt | 3 +- src/ChatProcess.py | 28 ++++++++++ src/chatUtils/Chat.py | 27 ++++++++++ src/chatUtils/prompts.py | 75 ++++++++++++++++++++++++++ src/chatUtils/requesters/llama.py | 2 + src/chatUtils/requesters/llama2.py | 2 + src/chatUtils/requesters/openaiChat.py | 5 ++ src/chatUtils/requesters/openaiText.py | 2 + src/chatUtils/requesters/request.py | 18 +++++++ 9 files changed, 161 insertions(+), 1 deletion(-) create mode 100644 src/chatUtils/Chat.py create mode 100644 src/chatUtils/prompts.py create mode 100644 
async def formatContext(self):
    """
    Build ``self.context``: recent channel history formatted as OpenAI
    chat-completion messages.

    Fetches up to 10 messages before the original message via
    ``fetch_messages_history``, skips any message flagged by
    ``moderate``, and tags each entry with a role/name pair
    ("assistant" for the bot's own messages, "user" otherwise).
    """
    messages: list[discord.Message] = await fetch_messages_history(
        self.message.channel, 10, self.original_message
    )
    self.context = []
    for msg in messages:
        if msg.author.id == self.bot.user.id:
            role = "assistant"
            name = "assistant"
        else:
            role = "user"
            # Bug fix: global_name is Optional in discord.py — fall back
            # to the account username so the API "name" field is never None.
            name = msg.author.global_name or msg.author.name
        # NOTE(review): moderate() is called without await here — confirm it
        # is synchronous, otherwise this condition is always truthy.
        if not moderate(self.openai_api_key, msg.content):
            self.context.append(
                {
                    "role": role,
                    "content": msg.content,
                    "name": name,
                }
            )
# Prompt templates, keyed by character name then by prompt kind
# ("chat" for chat-completion models, "text" for plain completion models).
# NOTE(review): the (misspelled) name "promts" is kept because the other
# functions in this module read it under that name.
promts = {}
for character in characters.reverseMatchingDict.keys():
    # Bug fix: the inner dict must exist before it can be assigned into;
    # the original code raised KeyError on the very first character.
    promts[character] = {}
    for kind in ("chat", "text"):
        with open(
            f"src/chatUtils/prompts/{character}/{kind}.txt", "r", encoding="utf-8"
        ) as f:
            promts[character][kind] = f.read()
def createTextPrompt(messages: list[dict], model: str, character: str) -> str:
    """
    Creates a text prompt from the messages list.

    Each message dict must carry "name" and "content" keys.  Messages
    whose name is "assistant" are re-attributed to *character*, and the
    prompt ends with "<character>:" so the model completes in voice.

    :param messages: context messages, oldest first.
    :param model: model identifier (currently unused, kept for interface
        parity with createChatPrompt).
    :param character: character whose "text" template prefixes the prompt.
    :return: the assembled completion prompt string.
    """
    prompt = promts[character]["text"]
    for message in messages:
        # Bug fix: messages are dicts, so attribute access (message.name)
        # raised AttributeError; use item access.  A local is used instead
        # of writing back so the caller's dicts are not mutated.
        name = character if message["name"] == "assistant" else message["name"]
        prompt += f"{name}: {message['content']} <|endofmessage|>\n"
    prompt += f"{character}:"
    return prompt
b/src/chatUtils/requesters/request.py new file mode 100644 index 0000000..fcd9a64 --- /dev/null +++ b/src/chatUtils/requesters/request.py @@ -0,0 +1,18 @@ +import discord +from src.chatUtils.requesters.openaiChat import openaiChat +from src.chatUtils.requesters.openaiText import openaiText +from src.chatUtils.requesters.llama import llama +from src.chatUtils.requesters.llama2 import llama2 + + +async def request( + model: str, prompt: list[dict] | str, message: discord.message, openai_api_key: str +): + if model == "gpt-3.5-turbo": + return await openaiChat(messages=prompt, openai_api_key=openai_api_key) + elif model == "text-davinci-003": + return await openaiText(prompt=prompt, openai_api_key=openai_api_key) + elif model == "text-llama": + return await llama(prompt=prompt) + elif model == "text-llama-2": + return await llama2(prompt=prompt)