🚀 Maaany things

2024-02-15 14:11:16 +01:00
parent 57bcf0af8e
commit 5be7937ffa
7 changed files with 272 additions and 50 deletions

@@ -1,49 +1,43 @@
-import openai
+import anthropic
 import gradio as gr
 import orjson
 from abc import ABC, abstractmethod
 from .BaseLLMEngine import BaseLLMEngine
 
-OPENAI_POSSIBLE_MODELS = [
-    "gpt-3.5-turbo-0125",
-    "gpt-4-turbo-preview",
+# Models supported by the Anthropic Messages API that this engine exposes
+ANTHROPIC_POSSIBLE_MODELS = [
+    "claude-2.1",
+    # Add more models as needed
 ]
 
-class OpenaiLLMEngine(BaseLLMEngine):
+class AnthropicsLLMEngine(BaseLLMEngine):
     num_options = 1
-    name = "OpenAI"
-    description = "OpenAI language model engine."
+    name = "Anthropics"
+    description = "Anthropic language model engine."
 
     def __init__(self, options: list) -> None:
         self.model = options[0]
+        self.client = anthropic.Anthropic()  # reads ANTHROPIC_API_KEY from the environment
         super().__init__()
 
-    def generate(self, system_prompt: str, chat_prompt: str, max_tokens: int = 512, temperature: float = 1.0, json_mode: bool = False, top_p: float = 1, frequency_penalty: float = 0, presence_penalty: float = 0) -> str:
-        response = openai.chat.completions.create(
-            model=self.model,
-            messages=[
-                {"role": "system", "content": system_prompt},
-                {"role": "user", "content": chat_prompt},
-            ],
-            max_tokens=max_tokens,
-            temperature=temperature,
-            top_p=top_p,
-            frequency_penalty=frequency_penalty,
-            presence_penalty=presence_penalty,
-            response_format={ "type": "json_object" } if json_mode else openai._types.NOT_GIVEN
-        )
-        return response.choices[0].message.content if not json_mode else orjson.loads(response.choices[0].message.content)
+    def generate(self, system_prompt: str, chat_prompt: str, max_tokens: int = 1024, temperature: float = 1.0, json_mode: bool = False, top_p: float = 1, frequency_penalty: float = 0, presence_penalty: float = 0) -> str | dict:
+        # frequency_penalty and presence_penalty are kept for interface compatibility
+        # but ignored: the Anthropic Messages API does not accept them.
+        message = self.client.messages.create(
+            model=self.model,
+            max_tokens=max_tokens,
+            system=system_prompt,  # Anthropic takes the system prompt as a top-level parameter, not a chat message
+            messages=[
+                {"role": "user", "content": chat_prompt},
+            ],
+            temperature=temperature,
+            top_p=top_p,
+        )
+        text = message.content[0].text
+        return orjson.loads(text) if json_mode else text
 
     @classmethod
     def get_options(cls) -> list:
         return [
             gr.Dropdown(
                 label="Model",
-                choices=OPENAI_POSSIBLE_MODELS,
+                choices=ANTHROPIC_POSSIBLE_MODELS,
                 max_choices=1,
-                value=OPENAI_POSSIBLE_MODELS[0]
+                value=ANTHROPIC_POSSIBLE_MODELS[0]
             )
         ]
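
For reference, a minimal usage sketch of the new engine (not part of the commit). It assumes ANTHROPIC_API_KEY is set in the environment, that AnthropicsLLMEngine is imported from the module shown above (its path is not visible in this view), and that BaseLLMEngine subclasses receive the dropdown selections as the options list, as the OpenAI engine did:

# Usage sketch, not part of the commit. Assumes ANTHROPIC_API_KEY is exported
# and AnthropicsLLMEngine is imported from the engine module shown above.
engine = AnthropicsLLMEngine(options=["claude-2.1"])

reply = engine.generate(
    system_prompt="You are a helpful assistant.",
    chat_prompt="Summarize the purpose of this repository in one sentence.",
    max_tokens=256,
    temperature=0.7,
)
print(reply)           # plain string by default

data = engine.generate(
    system_prompt="Respond only with JSON.",
    chat_prompt='Return {"status": "ok"}.',
    json_mode=True,
)
print(data["status"])  # parsed dict when json_mode=True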