From fc370828dfba8d97e45a68c7f09cc4a820cd1de4 Mon Sep 17 00:00:00 2001
From: Paillat
Date: Tue, 31 Oct 2023 12:57:06 +0100
Subject: [PATCH] :art: chore: run black to format the code

---
 src/ChatProcess.py                  |  2 +-
 src/chatUtils/prompts.py            |  6 ++++--
 src/chatUtils/requesters/claude.py  | 12 +++++++++---
 src/chatUtils/requesters/request.py |  1 +
 4 files changed, 15 insertions(+), 6 deletions(-)

diff --git a/src/ChatProcess.py b/src/ChatProcess.py
index fd6e90a..bbd16aa 100644
--- a/src/ChatProcess.py
+++ b/src/ChatProcess.py
@@ -108,7 +108,7 @@ class Chat:
         messages: list[discord.Message] = await fetch_messages_history(
             self.message.channel, 10, self.original_message
         )
-        #if latst item id is not original message id, add original message to messages
+        # if latst item id is not original message id, add original message to messages
         if messages[-1].id != self.original_message.id:
             messages.append(self.original_message)
         self.context = []
diff --git a/src/chatUtils/prompts.py b/src/chatUtils/prompts.py
index 2243be1..fdb9ce3 100644
--- a/src/chatUtils/prompts.py
+++ b/src/chatUtils/prompts.py
@@ -34,7 +34,8 @@ def createPrompt(
         sysprompt.replace("[server-name]", guildName)
         .replace("[channel-name]", channelName)
         .replace(
-            "[date-and-time]", datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
+            "[date-and-time]",
+            datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
         )
     )
     prompt[0]["content"] = sysprompt
@@ -44,7 +45,8 @@ def createPrompt(
             .replace("[server-name]", guildName)
             .replace("[channel-name]", channelName)
             .replace(
-                "[date-and-time]", datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S")
+                "[date-and-time]",
+                datetime.datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
             )
         )
     else:
diff --git a/src/chatUtils/requesters/claude.py b/src/chatUtils/requesters/claude.py
index 0b90f6b..ef7710b 100644
--- a/src/chatUtils/requesters/claude.py
+++ b/src/chatUtils/requesters/claude.py
@@ -7,8 +7,9 @@ anthropic = AsyncAnthropic(
     api_key=os.getenv("ANTHROPIC_KEY"),
 )
 
+
 async def claude(messages):
-    #messages are a dict {} with content and roler.
+    # messages are a dict {} with content and roler.
     prompt = ""
     for message in messages:
         if message["role"] == "system":
@@ -20,8 +21,13 @@ async def claude(messages):
         elif message["role"] == "function":
             ...
     prompt += AI_PROMPT
-    completion = await anthropic.completions.create(stop_sequences=["\n\nHuman (", "\n\nSYSTEM: "], model="claude-2", max_tokens_to_sample=512, prompt=prompt)
+    completion = await anthropic.completions.create(
+        stop_sequences=["\n\nHuman (", "\n\nSYSTEM: "],
+        model="claude-2",
+        max_tokens_to_sample=512,
+        prompt=prompt,
+    )
     return {
         "name": "send_message",
         "arguments": {"message": completion.completion},
-    } # a dummy function call is created.
\ No newline at end of file
+    } # a dummy function call is created.
diff --git a/src/chatUtils/requesters/request.py b/src/chatUtils/requesters/request.py
index 509c1eb..7c4e74e 100644
--- a/src/chatUtils/requesters/request.py
+++ b/src/chatUtils/requesters/request.py
@@ -5,6 +5,7 @@
 from src.chatUtils.requesters.llama import llama
 from src.chatUtils.requesters.llama2 import llama2
 from src.chatUtils.requesters.claude import claude
 
+
 class ModelNotFound(Exception):
     pass
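
Note for reviewers: below is a minimal, hypothetical sketch of how the reformatted claude() requester could be exercised in isolation. Only claude() and its return shape (a dummy "function call" dict carrying the completion text) come from the patch above; the sample message list, the role values other than "system" and "function", and the asyncio entry point are assumptions for illustration, not code from this repository.

import asyncio

from src.chatUtils.requesters.claude import claude

# Hypothetical conversation history: plain dicts with "role" and "content",
# matching the shape the requester iterates over. In the bot itself this
# list is built from Discord channel history, not hard-coded.
messages = [
    {"role": "system", "content": "You are a helpful Discord assistant."},
    {"role": "user", "content": "Summarize the last few messages, please."},
]


async def main():
    # Requires the anthropic package and an ANTHROPIC_KEY environment variable,
    # since claude.py builds its AsyncAnthropic client at import time.
    result = await claude(messages)
    # claude() wraps the completion in a dummy "function call" dict, so the
    # generated text lives under result["arguments"]["message"].
    print(result["arguments"]["message"])


asyncio.run(main())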