mirror of https://github.com/Paillat-dev/viralfactory.git, synced 2026-01-02 01:06:19 +00:00
Add anthropic llm engine
@@ -4,24 +4,34 @@ import orjson
 from .BaseLLMEngine import BaseLLMEngine

-# Assuming these are the models supported by Anthropics that you wish to include
 ANTHROPIC_POSSIBLE_MODELS = [
     "claude-2.1",
     # Add more models as needed
 ]


-class AnthropicsLLMEngine(BaseLLMEngine):
+class AnthropicLLMEngine(BaseLLMEngine):
     num_options = 1
-    name = "Anthropics"
-    description = "Anthropics language model engine."
+    name = "Anthropic"
+    description = "Anthropic language model engine."

     def __init__(self, options: list) -> None:
         self.model = options[0]
-        self.client = anthropic.Anthropic(api_key="YourAnthropicAPIKeyHere") # Ensure API key is securely managed
+        self.client = anthropic.Anthropic(
+            api_key="YourAnthropicAPIKeyHere"
+        ) # Ensure API key is securely managed
         super().__init__()

-    def generate(self, system_prompt: str, chat_prompt: str, max_tokens: int = 1024, temperature: float = 1.0, json_mode: bool = False, top_p: float = 1, frequency_penalty: float = 0, presence_penalty: float = 0) -> str | dict:
-        # Note: Adjust the parameters as per Anthropics API capabilities
+    def generate(
+        self,
+        system_prompt: str,
+        chat_prompt: str,
+        max_tokens: int = 1024,
+        temperature: float = 1.0,
+        json_mode: bool = False,
+        top_p: float = 1,
+        frequency_penalty: float = 0,
+        presence_penalty: float = 0,
+    ) -> str | dict:
         prompt = f"""{anthropic.HUMAN_PROMPT} {system_prompt} {anthropic.HUMAN_PROMPT} {chat_prompt} {anthropic.AI_PROMPT}"""
         if json_mode:
             # anthopic does not officially support JSON mode, but we can bias the output towards a JSON-like format
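The hunk above ends right after the prompt is assembled from anthropic.HUMAN_PROMPT and anthropic.AI_PROMPT; the actual API call sits in lines the diff does not show. For orientation, here is a minimal sketch of the legacy Text Completions call that generate() presumably wraps. The key, model, and parameter values are illustrative, and frequency_penalty / presence_penalty have no Anthropic counterpart, so they are presumably accepted only to match the shared engine signature.

import anthropic

# Sketch only: the real engine stores the client and model in __init__.
client = anthropic.Anthropic(api_key="sk-ant-...")  # placeholder key
prompt = f"{anthropic.HUMAN_PROMPT} You are terse. {anthropic.HUMAN_PROMPT} Say hi. {anthropic.AI_PROMPT}"
response = client.completions.create(
    model="claude-2.1",
    prompt=prompt,
    max_tokens_to_sample=1024,  # legacy name for max_tokens
    temperature=1.0,
    top_p=1.0,
)
print(response.completion)  # plain-text completion, as read in the next hunk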
@@ -37,10 +47,10 @@ class AnthropicsLLMEngine(BaseLLMEngine):

         content = response.completion
         if json_mode:
-            #we add back the opening curly brace wich is not included in the response since it is in the prompt
+            # we add back the opening curly brace wich is not included in the response since it is in the prompt
             content = "{" + content
-            #we remove everything after the last closing curly brace
-            content = content[:content.rfind("}") + 1]
+            # we remove everything after the last closing curly brace
+            content = content[: content.rfind("}") + 1]
             return orjson.loads(content)
         else:
             return content
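The brace handling above only works because, in JSON mode, the opening "{" is presumably appended to the prompt itself (that is what the comment about the brace being "in the prompt" refers to), so the completion starts mid-object. A self-contained sketch of that post-processing, using a made-up completion string:

import orjson

# Hypothetical raw completion: the model continued the JSON object started in the prompt,
# then added trailing chatter.
raw_completion = '"title": "Hello", "tags": ["a", "b"]} Sure, anything else?'

content = "{" + raw_completion                # restore the brace that lives in the prompt
content = content[: content.rfind("}") + 1]   # drop everything after the last closing brace
data = orjson.loads(content)                  # {'title': 'Hello', 'tags': ['a', 'b']}
print(data["title"])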
@@ -52,6 +62,6 @@ class AnthropicsLLMEngine(BaseLLMEngine):
                 label="Model",
                 choices=ANTHROPIC_POSSIBLE_MODELS,
                 max_choices=1,
-                value=ANTHROPIC_POSSIBLE_MODELS[0]
+                value=ANTHROPIC_POSSIBLE_MODELS[0],
             )
         ]
@@ -1,2 +1,3 @@
 from .BaseLLMEngine import BaseLLMEngine
-from .OpenaiLLMEngine import OpenaiLLMEngine
+from .OpenaiLLMEngine import OpenaiLLMEngine
+from .AnthropicLLMEngine import AnthropicLLMEngine
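Putting the pieces together, a minimal usage sketch of the new engine. The import path is hypothetical, since the diff omits file names, and the hard-coded "YourAnthropicAPIKeyHere" placeholder in __init__ would have to be replaced with a real key before this runs.

from LLMEngines import AnthropicLLMEngine  # hypothetical package name

# options[0] selects the model, per __init__ in the first hunk.
engine = AnthropicLLMEngine(options=["claude-2.1"])

text = engine.generate(
    system_prompt="You write short video hooks.",
    chat_prompt="Give me one hook about space.",
    max_tokens=256,
)
print(text)

# With json_mode=True the engine returns a parsed dict instead of a string.
data = engine.generate(
    system_prompt="Answer as JSON.",
    chat_prompt='Return {"hook": "..."} about space.',
    json_mode=True,
)
print(data)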