2023-08-19 15:30:57 +02:00
|
|
|
import discord
|
|
|
|
|
from src.chatUtils.requesters.openaiChat import openaiChat
|
|
|
|
|
from src.chatUtils.requesters.openaiText import openaiText
|
|
|
|
|
from src.chatUtils.requesters.llama import llama
|
|
|
|
|
from src.chatUtils.requesters.llama2 import llama2
|
2023-10-31 12:08:32 +01:00
|
|
|
from src.chatUtils.requesters.claude import claude
|
2023-08-19 15:30:57 +02:00
|
|
|
|
2023-08-20 12:42:02 +02:00
|
|
|
class ModelNotFound(Exception):
    """Raised by request() when the given model name matches no known backend."""
|
|
|
|
|
|
|
|
|
|
|
2023-08-19 15:30:57 +02:00
|
|
|
async def request(
|
2023-08-20 12:42:02 +02:00
|
|
|
model: str,
|
|
|
|
|
prompt: list[dict] | str,
|
|
|
|
|
openai_api_key: str,
|
|
|
|
|
funtcions: list[dict] = None,
|
2023-09-04 12:19:08 +02:00
|
|
|
custom_temp: float = 1.2,
|
2023-08-19 15:30:57 +02:00
|
|
|
):
|
|
|
|
|
if model == "gpt-3.5-turbo":
|
2023-08-20 12:42:02 +02:00
|
|
|
return await openaiChat(
|
|
|
|
|
messages=prompt,
|
|
|
|
|
openai_api_key=openai_api_key,
|
|
|
|
|
functions=funtcions,
|
|
|
|
|
model=model,
|
2023-09-04 12:19:08 +02:00
|
|
|
temperature=custom_temp,
|
2023-08-20 12:42:02 +02:00
|
|
|
)
|
2023-08-19 15:30:57 +02:00
|
|
|
elif model == "text-davinci-003":
|
2023-08-20 12:42:02 +02:00
|
|
|
# return await openaiText(prompt=prompt, openai_api_key=openai_api_key)
|
|
|
|
|
raise NotImplementedError("This model is not supported yet")
|
2023-08-19 15:30:57 +02:00
|
|
|
elif model == "text-llama":
|
|
|
|
|
return await llama(prompt=prompt)
|
2023-08-20 12:42:02 +02:00
|
|
|
elif model == "text-llama2":
|
|
|
|
|
# return await llama2(prompt=prompt)
|
|
|
|
|
raise NotImplementedError("This model is not supported yet")
|
2023-10-31 12:08:32 +01:00
|
|
|
elif model == "claude":
|
|
|
|
|
return await claude(messages=prompt)
|
2023-08-20 12:42:02 +02:00
|
|
|
else:
|
|
|
|
|
raise ModelNotFound(f"Model {model} not found")
|