From 7c2a263b230e557b7d6f89398dcdf2fea8488a8c Mon Sep 17 00:00:00 2001
From: Paillat
Date: Tue, 31 Oct 2023 12:49:31 +0100
Subject: [PATCH] 🔧 chore(claude.py): increase max_tokens_to_sample value to
 512 for better completion results
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The `max_tokens_to_sample` value in the `anthropic.completions.create`
function call has been increased from 300 to 512. This change improves
completion results by allowing the model to generate longer responses.
A leftover `print(prompt)` debug statement is also removed.
---
 src/chatUtils/requesters/claude.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/chatUtils/requesters/claude.py b/src/chatUtils/requesters/claude.py
index 29a530f..0b90f6b 100644
--- a/src/chatUtils/requesters/claude.py
+++ b/src/chatUtils/requesters/claude.py
@@ -20,8 +20,7 @@ async def claude(messages):
         elif message["role"] == "function":
             ...
     prompt += AI_PROMPT
-    completion = await anthropic.completions.create(stop_sequences=["\n\nHuman (", "\n\nSYSTEM: "], model="claude-2", max_tokens_to_sample=300, prompt=prompt)
-    print(prompt)
+    completion = await anthropic.completions.create(stop_sequences=["\n\nHuman (", "\n\nSYSTEM: "], model="claude-2", max_tokens_to_sample=512, prompt=prompt)
     return {
         "name": "send_message",
         "arguments": {"message": completion.completion},
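
For reference, a minimal standalone sketch of the call this patch touches, using the anthropic SDK's legacy Text Completions API. The client setup, prompt text, and main() wrapper below are illustrative assumptions, not code from this repository:

# Sketch only: assumes the anthropic Python SDK's legacy completions API.
import asyncio

from anthropic import AsyncAnthropic, HUMAN_PROMPT, AI_PROMPT

anthropic = AsyncAnthropic()  # reads ANTHROPIC_API_KEY from the environment


async def main():
    # Legacy completions prompts must follow the Human/Assistant turn format.
    prompt = f"{HUMAN_PROMPT} Summarize the benefits of longer completions.{AI_PROMPT}"
    completion = await anthropic.completions.create(
        model="claude-2",
        max_tokens_to_sample=512,  # raised from 300 so longer replies are not cut off
        stop_sequences=["\n\nHuman (", "\n\nSYSTEM: "],
        prompt=prompt,
    )
    print(completion.completion)


asyncio.run(main())

Note that max_tokens_to_sample only caps the length of the generated text; raising it lets the model finish longer answers rather than forcing longer output.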