feat(server.ts): rename the port variable from lowercase port to uppercase PORT to follow the naming convention for constants

 feat(server.ts): add support for the process.env.PORT environment variable so the app can run on a configurable port
🐛 fix(main.py): remove duplicate cog addition in main.py
 feat(main.py): add cogs.Help(bot) to the list of cogs in main.py
🐛 fix(main.py): remove redundant import statements in main.py
 feat(main.py): add on_guild_remove event handler in main.py
 feat(main.py): add on_guild_join event handler in main.py
 feat(main.py): add support for discord.Intents in main.py
 feat(main.py): add intents.message_content = True in main.py
 feat(main.py): add intents = discord.Intents.default() in main.py
 feat(main.py): add discord.Bot(intents=intents, help_command=None) in main.py
 feat(main.py): add import statements in main.py
 feat(main.py): add from src.config import debug, discord_token in main.py
 feat(main.py): add import discord in main.py
 feat(main.py): add import src.config in main.py
 feat(main.py): add import src.cogs in main.py
 feat(main.py): add import src.cogs.chat in main.py
 feat(main.py): add import src.cogs.manage_chat in main.py
 feat(main.py): add import src.cogs.moderation in main.py
 feat(main.py): add import src.cogs.channelSetup in main.py
 feat(main.py): add import src.cogs.help in main.py
 feat(main.py): add import src.cogs.Chat in main.py
 feat(main.py): add import src.cogs.ManageChat in main.py
 feat(main.py): add import src.cogs.Moderation in main.py
 feat(main.py): add import src.cogs.ChannelSetup in main.py
 feat(main.py): add import src.cogs.Help in main.py
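
Taken together, the main.py messages above describe a standard Pycord-style bot setup. The sketch below is a guess at what the file plausibly looks like after this commit: the imports, the intents flags, the discord.Bot(...) call, the cog list including cogs.Help(bot), and the on_guild_join / on_guild_remove handlers all come from the messages, while the handler bodies and the exact cog class names are assumptions.

import discord

import src.cogs.channelSetup
import src.cogs.chat
import src.cogs.help
import src.cogs.manage_chat
import src.cogs.moderation
from src.config import debug, discord_token

# Default intents plus the privileged message-content intent.
intents = discord.Intents.default()
intents.message_content = True

bot = discord.Bot(intents=intents, help_command=None)

# Cog list as described in the messages; class names are assumed from the module names.
cogs = [
    src.cogs.chat.Chat(bot),
    src.cogs.manage_chat.ManageChat(bot),
    src.cogs.moderation.Moderation(bot),
    src.cogs.channelSetup.ChannelSetup(bot),
    src.cogs.help.Help(bot),
]
for cog in cogs:
    bot.add_cog(cog)

@bot.event
async def on_guild_join(guild: discord.Guild):
    # Hypothetical body; the messages only say the handler was added.
    print(f"Joined guild {guild.name} ({guild.id})")

@bot.event
async def on_guild_remove(guild: discord.Guild):
    # Hypothetical body; the messages only say the handler was added.
    print(f"Left guild {guild.name} ({guild.id})")

if __name__ == "__main__":
    # debug is imported per the messages above; how it is used is not shown here.
    bot.run(discord_token)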
2023-08-20 12:42:02 +02:00
parent 1d4209dc0f
commit e4b8e2824b
21 changed files with 350 additions and 860 deletions


@@ -5,14 +5,30 @@ from src.chatUtils.requesters.llama import llama
 from src.chatUtils.requesters.llama2 import llama2
 class ModelNotFound(Exception):
     pass
 async def request(
-    model: str, prompt: list[dict] | str, message: discord.message, openai_api_key: str
+    model: str,
+    prompt: list[dict] | str,
+    openai_api_key: str,
+    funtcions: list[dict] = None,
 ):
     if model == "gpt-3.5-turbo":
-        return await openaiChat(messages=prompt, openai_api_key=openai_api_key)
+        return await openaiChat(
+            messages=prompt,
+            openai_api_key=openai_api_key,
+            functions=funtcions,
+            model=model,
+        )
     elif model == "text-davinci-003":
-        return await openaiText(prompt=prompt, openai_api_key=openai_api_key)
+        # return await openaiText(prompt=prompt, openai_api_key=openai_api_key)
+        raise NotImplementedError("This model is not supported yet")
     elif model == "text-llama":
         return await llama(prompt=prompt)
+    elif model == "text-llama-2":
+        return await llama2(prompt=prompt)
+    elif model == "text-llama2":
+        # return await llama2(prompt=prompt)
+        raise NotImplementedError("This model is not supported yet")
     else:
         raise ModelNotFound(f"Model {model} not found")
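
For reference, a minimal sketch of how the reworked request() dispatcher might be called after this change. The import path and the placeholder API key are assumptions (the diffed module's name is not shown above), the return value is simply printed, and funtcions is spelled exactly as it appears in the new signature.

import asyncio

# Assumed import path for the module shown in the diff above.
from src.chatUtils.request import request

async def main():
    # The discord message argument is gone; an optional "funtcions" list
    # (OpenAI function schemas) and the model name are now passed through.
    reply = await request(
        model="gpt-3.5-turbo",
        prompt=[{"role": "user", "content": "Hello!"}],
        openai_api_key="sk-...",  # placeholder key
        funtcions=None,
    )
    print(reply)

asyncio.run(main())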