mirror of
https://github.com/Paillat-dev/Botator.git
synced 2026-01-02 01:06:19 +00:00
🔧 chore(requirements.txt): comment out py-cord and add pycord from GitHub repository to use the latest version
🔧 chore(ChatProcess.py): import fetch_messages_history function from Chat module to use it in Chat class 🔧 chore(ChatProcess.py): import moderate and ModerationError from utils.misc module to use them in Chat class 🔧 chore(Chat.py): add fetch_messages_history function to fetch message history from a channel 🔧 chore(Chat.py): add formatContext function to format the context for the bot to use 🔧 chore(Chat.py): raise an exception if no openai api key is set 🔧 chore(Chat.py): add logic to filter and format messages for the context 🔧 chore(Chat.py): fix typo in the import statement for ModerationError 🔧 chore(Chat.py): fix typo in the import statement for moderate 🔧 chore(Chat.py): fix typo in the import statement for fetch_messages_history 🔧 chore(prompts.py): create prompts dictionary and read chat and text prompts from files for each character 🔧 chore(prompts.py): create createPrompt function to create a prompt from the messages list 🔧 chore(prompts.py): create createTextPrompt function to create a text prompt from the messages list 🔧 chore(prompts.py): create createChatPrompt function to create a chat prompt from the messages list 🔧 chore(requesters/llama.py): create llama function as a placeholder 🔧 chore(requesters/llama2.py): create llama2 function as a placeholder 🔧 chore(requesters/openaiChat.py): import openai_caller from utils.openaicaller module 🔧 chore(requesters/openaiChat.py): create openaiChat function as a placeholder 🔧 chore(requesters/openaiText.py): create openaiText function as a placeholder 🔧 chore(requesters/request.py): import openaiChat, openaiText, llama, and llama2 functions from respective modules 🔧 chore(requesters/request.py): create request function to handle different models and make requests
This commit is contained in:
@@ -1,4 +1,5 @@
|
|||||||
py-cord
|
#py-cord
|
||||||
|
git+https://github.com/Pycord-Development/pycord.git
|
||||||
python-dotenv
|
python-dotenv
|
||||||
openai
|
openai
|
||||||
emoji
|
emoji
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import json
|
|||||||
from src.utils.misc import moderate, ModerationError, Hasher
|
from src.utils.misc import moderate, ModerationError, Hasher
|
||||||
from src.utils.variousclasses import models, characters, apis
|
from src.utils.variousclasses import models, characters, apis
|
||||||
from src.guild import Guild
|
from src.guild import Guild
|
||||||
|
from src.chatUtils.Chat import fetch_messages_history
|
||||||
from src.utils.openaicaller import openai_caller
|
from src.utils.openaicaller import openai_caller
|
||||||
from src.functionscalls import (
|
from src.functionscalls import (
|
||||||
call_function,
|
call_function,
|
||||||
@@ -15,6 +16,7 @@ from src.functionscalls import (
|
|||||||
server_normal_channel_functions,
|
server_normal_channel_functions,
|
||||||
FuntionCallError,
|
FuntionCallError,
|
||||||
)
|
)
|
||||||
|
from utils.misc import moderate, ModerationError
|
||||||
|
|
||||||
|
|
||||||
class Chat:
|
class Chat:
|
||||||
@@ -84,3 +86,29 @@ class Chat:
|
|||||||
self.model = self.settings["model"]
|
self.model = self.settings["model"]
|
||||||
self.character = self.settings["character"]
|
self.character = self.settings["character"]
|
||||||
self.openai_api_key = self.guild.api_keys.get("openai", None)
|
self.openai_api_key = self.guild.api_keys.get("openai", None)
|
||||||
|
if self.openai_api_key == None:
|
||||||
|
raise Exception("No openai api key is set")
|
||||||
|
|
||||||
|
async def formatContext(self):
    """
    This function formats the context for the bot to use
    """
    history = await fetch_messages_history(
        self.message.channel, 10, self.original_message
    )
    self.context = []
    for message in history:
        if message.author.id == self.bot.user.id:
            role = name = "assistant"
        else:
            role = "user"
            # NOTE(review): global_name can be None for some Discord accounts —
            # confirm downstream consumers tolerate a null "name".
            name = message.author.global_name
        # Only keep messages that pass the moderation check.
        if not moderate(self.openai_api_key, message.content):
            self.context.append(
                {"role": role, "content": message.content, "name": name}
            )
|
||||||
|
|||||||
27
src/chatUtils/Chat.py
Normal file
27
src/chatUtils/Chat.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import discord
|
||||||
|
|
||||||
|
|
||||||
|
def is_ignorable(content):
    """Return True when a message should be excluded from chat context.

    Messages starting with "-" or "//" are treated as comments the bot
    must ignore.
    """
    return content.startswith("-") or content.startswith("//")


async def fetch_messages_history(
    channel: "discord.TextChannel", limit: int, original_message: "discord.Message"
) -> "list[discord.Message]":
    """Fetch up to *limit* non-ignorable messages from *channel*.

    Scans at most the 100 most recent messages (those older than
    *original_message* when one is given), skips ignorable ones, and returns
    the survivors in chronological (oldest-first) order.
    """
    messages = []
    # channel.history(before=None) is equivalent to omitting `before`, so one
    # call covers both the anchored and unanchored cases (the original
    # duplicated the whole loop in an if/else).
    async for msg in channel.history(limit=100, before=original_message):
        if not is_ignorable(msg.content):
            messages.append(msg)
            if len(messages) == limit:
                break
    # history() yields newest-first; callers want oldest-first.
    messages.reverse()
    return messages
|
||||||
75
src/chatUtils/prompts.py
Normal file
75
src/chatUtils/prompts.py
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
import datetime
|
||||||
|
|
||||||
|
from src.utils.variousclasses import models, characters, apis
|
||||||
|
|
||||||
|
promts = {}
# One prompt pair per character: "chat" (message-list models) and "text"
# (plain-completion models), read from src/chatUtils/prompts/<character>/.
for character in characters.reverseMatchingDict.keys():
    # BUG FIX: the nested dict must exist before its keys are assigned —
    # the original `promts[character]["chat"] = ...` raised KeyError.
    promts[character] = {}
    with open(
        f"src/chatUtils/prompts/{character}/chat.txt", "r", encoding="utf-8"
    ) as f:
        promts[character]["chat"] = f.read()

    with open(
        f"src/chatUtils/prompts/{character}/text.txt", "r", encoding="utf-8"
    ) as f:
        promts[character]["text"] = f.read()
|
||||||
|
|
||||||
|
|
||||||
|
def createPrompt(
|
||||||
|
messages: list[dict],
|
||||||
|
model: str,
|
||||||
|
character: str,
|
||||||
|
type: str,
|
||||||
|
guildName: str,
|
||||||
|
channelName: str,
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Creates a prompt from the messages list
|
||||||
|
"""
|
||||||
|
if type == "chat":
|
||||||
|
prompt = (
|
||||||
|
createChatPrompt(messages, model, character)
|
||||||
|
.replace("[server-name]", guildName)
|
||||||
|
.replace("[channel-name]", channelName)
|
||||||
|
.replace(
|
||||||
|
"[datetime]", datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
elif type == "text":
|
||||||
|
prompt = (
|
||||||
|
createTextPrompt(messages, model, character)
|
||||||
|
.replace("[server-name]", guildName)
|
||||||
|
.replace("[channel-name]", channelName)
|
||||||
|
.replace(
|
||||||
|
"[datetime]", datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ValueError("Invalid type")
|
||||||
|
return prompt
|
||||||
|
|
||||||
|
|
||||||
|
def createTextPrompt(messages: list[dict], model: str, character: str) -> str:
    """Creates a text (plain-completion) prompt from the messages list.

    Each entry of *messages* is a dict with "role", "content" and "name"
    keys; assistant entries are relabelled with the character's name before
    being appended to the character's text template.
    """
    global promts
    prompt = promts[character]["text"]
    for message in messages:
        # BUG FIX: entries are dicts, so use item access — the original
        # `message.name` attribute access raised AttributeError.
        if message["name"] == "assistant":
            message["name"] = character
        prompt += f"{message['name']}: {message['content']} <|endofmessage|>\n"
    prompt += f"{character}:"

    return prompt
|
||||||
|
|
||||||
|
|
||||||
|
def createChatPrompt(messages: list[dict], model: str, character: str) -> list[dict]:
    """Creates a chat prompt from the messages list.

    Returns the character's system prompt followed by *messages*, in the
    role/content dict format expected by chat-completion APIs.
    (BUG FIX: the return annotation said ``str`` but a list is returned.)
    """
    global promts
    system_prompt = promts[character]["chat"]
    final_prompt = [{"role": "system", "content": system_prompt}]
    final_prompt.extend(messages)
    return final_prompt
|
||||||
2
src/chatUtils/requesters/llama.py
Normal file
2
src/chatUtils/requesters/llama.py
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
async def llama(prompt):
    """Placeholder for the LLaMA completion backend — not implemented yet."""
    return None
|
||||||
2
src/chatUtils/requesters/llama2.py
Normal file
2
src/chatUtils/requesters/llama2.py
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
async def llama2(prompt):
    """Placeholder for the LLaMA 2 completion backend — not implemented yet."""
    return None
|
||||||
5
src/chatUtils/requesters/openaiChat.py
Normal file
5
src/chatUtils/requesters/openaiChat.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from src.utils.openaicaller import openai_caller
|
||||||
|
|
||||||
|
|
||||||
|
async def openaiChat(messages, function):
    # Placeholder: instantiates the OpenAI caller but performs no request yet;
    # implicitly returns None.
    # NOTE(review): request.py invokes this as openaiChat(messages=...,
    # openai_api_key=...), which does not match this (messages, function)
    # signature — confirm the intended contract before wiring it up.
    caller = openai_caller()
|
||||||
2
src/chatUtils/requesters/openaiText.py
Normal file
2
src/chatUtils/requesters/openaiText.py
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
async def openaiText(prompt, openai_api_key):
    """Placeholder for the OpenAI text-completion backend — not implemented yet."""
    return None
|
||||||
18
src/chatUtils/requesters/request.py
Normal file
18
src/chatUtils/requesters/request.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import discord
|
||||||
|
from src.chatUtils.requesters.openaiChat import openaiChat
|
||||||
|
from src.chatUtils.requesters.openaiText import openaiText
|
||||||
|
from src.chatUtils.requesters.llama import llama
|
||||||
|
from src.chatUtils.requesters.llama2 import llama2
|
||||||
|
|
||||||
|
|
||||||
|
async def request(
|
||||||
|
model: str, prompt: list[dict] | str, message: discord.message, openai_api_key: str
|
||||||
|
):
|
||||||
|
if model == "gpt-3.5-turbo":
|
||||||
|
return await openaiChat(messages=prompt, openai_api_key=openai_api_key)
|
||||||
|
elif model == "text-davinci-003":
|
||||||
|
return await openaiText(prompt=prompt, openai_api_key=openai_api_key)
|
||||||
|
elif model == "text-llama":
|
||||||
|
return await llama(prompt=prompt)
|
||||||
|
elif model == "text-llama-2":
|
||||||
|
return await llama2(prompt=prompt)
|
||||||
Reference in New Issue
Block a user