Botator/src/ChatProcess.py

import asyncio
import os
import re
import discord
import datetime
import json
from src.utils.misc import moderate, ModerationError, Hasher
from src.utils.variousclasses import models, characters, apis
from src.guild import Guild
from src.chatUtils.Chat import fetch_messages_history
from src.utils.openaicaller import openai_caller
from src.functionscalls import (
    call_function,
    functions,
    server_normal_channel_functions,
    FuntionCallError,
)


class Chat:
    def __init__(self, bot, message: discord.Message):
        self.bot = bot
        self.message: discord.Message = message
        self.guild = Guild(self.message.guild.id)
        self.author = message.author
        self.is_bots_thread = False

    async def getSupplementaryData(self) -> None:
        """
        Gathers the contextual data that later steps need:
        - The original message (if this message is a reply to a previous message from the bot)
        - The channel whose settings apply (the parent channel when the message was sent in a thread)
        """
        if isinstance(self.message.channel, discord.Thread):
            if self.message.channel.owner_id == self.bot.user.id:
                self.is_bots_thread = True
            self.channelIdForSettings = self.message.channel.parent_id
        else:
            self.channelIdForSettings = self.message.channel.id
        try:
            self.original_message = await self.message.channel.fetch_message(
                self.message.reference.message_id
            )
        except (AttributeError, discord.NotFound, discord.HTTPException):
            # Not a reply, or the referenced message could not be fetched
            self.original_message = None
        if (
            self.original_message is not None
            and self.original_message.author.id != self.bot.user.id
        ):
            # Only keep the referenced message when it was written by the bot itself
            self.original_message = None

    async def preExitCriteria(self) -> bool:
        """
        Returns True if any of the exit criteria are met.
        This checks whether the guild has the settings the bot needs in order to work.
        """
        returnCriterias = []
        returnCriterias.append(self.message.author.id == self.bot.user.id)
        returnCriterias.append(self.api_key is None)
        returnCriterias.append(self.is_active == 0)
        return any(returnCriterias)

    async def postExitCriteria(self) -> bool:
        """
        Returns True if none of the "respond" criteria are met (each criterion is
        collected below and the list is then negated with `not any(...)`).
        This checks whether the bot should actually respond to the message or
        whether the message does not concern the bot.
        """
        returnCriterias = []
        returnCriterias.append(
            self.guild.sanitizedChannels.get(str(self.message.channel.id), None) is not None
        )
        returnCriterias.append(
            self.message.content.find("<@" + str(self.bot.user.id) + ">") != -1
        )
        returnCriterias.append(self.original_message is not None)
        returnCriterias.append(self.is_bots_thread)
        return not any(returnCriterias)
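
    # In short, with the checks above the bot goes on to respond when at least
    # one of these holds:
    #   - the channel appears in guild.sanitizedChannels,
    #   - the message mentions the bot (<@bot_id>),
    #   - the message is a reply to one of the bot's own messages,
    #   - the message was posted in a thread the bot created.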

    async def getSettings(self):
        self.settings = self.guild.getChannelInfo(str(self.channelIdForSettings))
        self.model = self.settings["model"]
        self.character = self.settings["character"]
        self.openai_api_key = self.guild.api_keys.get("openai", None)
        if self.openai_api_key is None:
            raise Exception("No OpenAI API key is set")

    async def formatContext(self):
        """
        Formats the conversation context that the model will receive.
        """
        messages: list[discord.Message] = await fetch_messages_history(
            self.message.channel, 10, self.original_message
        )
        self.context = []
        for msg in messages:
            if msg.author.id == self.bot.user.id:
                role = "assistant"
                name = "assistant"
            else:
                role = "user"
                # global_name can be None (e.g. for bots); fall back to the username
                name = msg.author.global_name or msg.author.name
            # Skip messages that the moderation check flags
            if not moderate(self.openai_api_key, msg.content):
                self.context.append(
                    {
                        "role": role,
                        "content": msg.content,
                        "name": name,
                    }
                )
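

# --- Hedged usage sketch (not part of the original file) ---
# The methods above appear designed to be called in sequence from the bot's
# message handler. The real driver (and the final request/response step) lives
# elsewhere in this file or repository and is not shown in this excerpt, so the
# ordering below is an assumption for illustration only. Note that
# preExitCriteria reads self.api_key and self.is_active, which are presumably
# set in code that is not visible here.
async def _example_message_flow(bot, message: discord.Message) -> None:
    """Illustrative only: one plausible way to drive the Chat pipeline."""
    chat = Chat(bot, message)
    if await chat.preExitCriteria():       # own message, missing key, or inactive
        return
    await chat.getSupplementaryData()      # resolve thread / reply context
    if await chat.postExitCriteria():      # message does not concern the bot
        return
    await chat.getSettings()               # per-channel model, character, OpenAI key
    await chat.formatContext()             # build the OpenAI-style message list
    # ...the request/response step (request(), call_function, etc.) would follow here.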