Mirror of https://github.com/Paillat-dev/viralfactory.git, synced 2026-01-02 01:06:19 +00:00
feat(GenerationContext.py): add new file GenerationContext.py to handle the context of generation engines

feat(OpenaiLLMEngine.py): add the orjson library for JSON serialization and deserialization, and implement the generate method to make an API call to the OpenAI chat completions endpoint
feat(__init__.py): import OpenaiLLMEngine in the LLMEngine package
feat(BaseScriptEngine.py): add a time_script method to the BaseScriptEngine class
feat(CustomScriptEngine.py): add new file CustomScriptEngine.py to handle custom script generation, implement the generate method to return the provided script, and add a get_options method that provides a textbox for the prompt input
feat(__init__.py): import CustomScriptEngine in the ScriptEngine package
feat(__init__.py): import the LLMEngine package and add OpenaiLLMEngine to the ENGINES dictionary
refactor(gradio_ui.py): change the equal_height attribute of Row to False to allow different heights for input blocks
src/chore/GenerationContext.py (new file, 17 lines)

@@ -0,0 +1,17 @@
+import moviepy
+
+from .. import engines
+class GenerationContext:
+
+    def __init__(self, llmengine: engines.LLMEngine.BaseLLMEngine, scriptengine: engines.ScriptEngine.BaseScriptEngine, ttsengine: engines.TTSEngine.BaseTTSEngine) -> None:
+        self.llmengine = llmengine
+        self.llmengine.ctx = self
+
+        self.scriptengine = scriptengine
+        self.scriptengine.ctx = self
+
+        self.ttsengine = ttsengine
+        self.ttsengine.ctx = self
+
+    def process(self):
+        timed_script = self.scriptengine.generate()
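Not part of the commit, but a minimal wiring sketch of how the new context could be assembled so that every engine receives its back-reference via ctx. Import paths and the constructor option values below are assumptions for illustration, not taken from the repo:

# Hypothetical wiring; option values and import paths are assumed.
from src import engines
from src.chore.GenerationContext import GenerationContext

llm = engines.LLMEngine.OpenaiLLMEngine(["gpt-3.5-turbo"])              # options[0] -> self.model (model name assumed)
script = engines.ScriptEngine.CustomScriptEngine(["My custom script"])  # options[0] -> self.script
tts = engines.TTSEngine.CoquiTTSEngine([])                              # CoquiTTSEngine options unknown; empty list assumed

ctx = GenerationContext(llmengine=llm, scriptengine=script, ttsengine=tts)
# Each engine now holds a back-reference, e.g. script.ctx.llmengine is llm
ctx.process()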
src/engines/LLMEngine/OpenaiLLMEngine.py

@@ -1,5 +1,6 @@
 import openai
 import gradio as gr
+import orjson

 from abc import ABC, abstractmethod

@@ -15,10 +16,29 @@ class OpenaiLLMEngine(BaseLLMEngine):
     name = "OpenAI"
     description = "OpenAI language model engine."

+    def __init__(self, options: list) -> None:
+        self.model = options[0]
+        super().__init__()
+
     def generate(self, system_prompt: str, chat_prompt: str, max_tokens: int = 512, temperature: float = 1.0, json_mode: bool= False, top_p: float = 1, frequency_penalty: float = 0, presence_penalty: float = 0) -> str:
-        ... # TODO: Implement this method
+        response = openai.chat.completions.create(
+            model=self.model,
+            messages=[
+                {"role": "system", "content": system_prompt},
+                {"role": "user", "content": chat_prompt},
+            ],
+            max_tokens=max_tokens,
+            temperature=temperature,
+            top_p=top_p,
+            frequency_penalty=frequency_penalty,
+            presence_penalty=presence_penalty,
+            response_format={ "type": "json_object" } if json_mode else openai._types.NOT_GIVEN
+        )
+        return response.choices[0].message.content if not json_mode else orjson.loads(response.choices[0].message.content)

-    def get_options(self) -> list:
+    @classmethod
+    def get_options(cls) -> list:
         return [
             gr.Dropdown(
                 label="Model",
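A quick usage sketch of the new generate method, not part of the diff. The model name is an assumption (the hunk only shows that options[0] is stored in self.model), and a valid OpenAI API key is assumed to be configured for the openai client:

engine = OpenaiLLMEngine(["gpt-3.5-turbo"])  # assumed model name

text = engine.generate(
    system_prompt="You are a concise assistant.",
    chat_prompt="Write a one-line shower thought.",
    max_tokens=64,
)

# With json_mode=True the content is passed through orjson.loads, so the call
# returns a dict even though the signature is annotated -> str.
data = engine.generate(
    system_prompt="Reply with JSON containing a 'thought' key.",
    chat_prompt="Write a shower thought.",
    json_mode=True,
)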
src/engines/LLMEngine/__init__.py

@@ -1 +1,2 @@
 from .BaseLLMEngine import BaseLLMEngine
+from .OpenaiLLMEngine import OpenaiLLMEngine
src/engines/ScriptEngine/BaseScriptEngine.py

@@ -8,3 +8,6 @@ class BaseScriptEngine(BaseEngine):
     @abstractmethod
     def generate(self) -> str:
         pass
+
+    def time_script(self):
+        ...
src/engines/ScriptEngine/CustomScriptEngine.py (new file, 25 lines)

@@ -0,0 +1,25 @@
+from .BaseScriptEngine import BaseScriptEngine
+import gradio as gr
+
+
+class CustomScriptEngine(BaseScriptEngine):
+    name = "Custom Script Engine"
+    description = "Generate a script with a custom provided prompt"
+    num_options = 1
+
+    def __init__(self, options: list[list | tuple | str | int | float | bool | None]):
+        self.script = options[0]
+        super().__init__()
+
+    def generate(self, *args, **kwargs) -> str:
+        return self.script
+
+    @classmethod
+    def get_options(cls) -> list:
+        return [
+            gr.Textbox(
+                label="Prompt",
+                placeholder="Enter your prompt here",
+                value="",
+            )
+        ]
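Usage note, not part of the commit: the value typed into the Textbox defined by get_options is handed back to the constructor as options[0], and generate returns it verbatim, so despite the "Prompt" label the text is used directly as the script:

engine = CustomScriptEngine(["This exact text becomes the script."])
print(engine.generate())  # -> "This exact text becomes the script."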
src/engines/ScriptEngine/__init__.py

@@ -1,2 +1,3 @@
 from .BaseScriptEngine import BaseScriptEngine
 from .ShowerThoughtsScriptEngine import ShowerThoughtsScriptEngine
+from .CustomScriptEngine import CustomScriptEngine
src/engines/__init__.py

@@ -1,8 +1,10 @@
 from . import TTSEngine
 from .BaseEngine import BaseEngine
 from . import ScriptEngine
+from . import LLMEngine

 ENGINES = {
+    "LLMEngine": [LLMEngine.OpenaiLLMEngine],
     "TTSEngine": [TTSEngine.CoquiTTSEngine, TTSEngine.ElevenLabsTTSEngine],
-    "ScriptEngine": [ScriptEngine.ShowerThoughtsScriptEngine],
+    "ScriptEngine": [ScriptEngine.ShowerThoughtsScriptEngine, ScriptEngine.CustomScriptEngine],
 }
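ENGINES is the registry the Gradio UI iterates over to build one options block per engine type (see the gradio_ui.py hunk below). A minimal sketch of how a registered class could be resolved and instantiated from it; the lookup-by-name helper is hypothetical, not code from the repo:

from src.engines import ENGINES  # import path assumed from the repo layout

def resolve_engine(engine_type: str, display_name: str, options: list):
    # Pick the registered class whose `name` attribute matches the UI selection,
    # then construct it with the option values collected from get_options().
    for cls in ENGINES[engine_type]:
        if cls.name == display_name:
            return cls(options)
    raise ValueError(f"Unknown {engine_type}: {display_name}")

script_engine = resolve_engine("ScriptEngine", "Custom Script Engine", ["My script"])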
gradio_ui.py

@@ -40,7 +40,7 @@ class GenerateUI:

     def get_generate_interface(self) -> gr.Blocks:
         with gr.Blocks() as interface:
-            with gr.Row() as row:
+            with gr.Row(equal_height=False) as row:
                 inputs = []
                 with gr.Blocks() as col1:
                     for engine_type, engines in ENGINES.items():
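For context on the equal_height change: gr.Row stretches its children to the tallest sibling when equal_height is left at its then-default of True, so passing False lets each engine's options column keep its natural height. A standalone sketch, independent of the repo's UI code:

import gradio as gr

with gr.Blocks() as demo:
    with gr.Row(equal_height=False):  # columns keep their own heights
        with gr.Column():
            gr.Textbox(label="Short options block")
        with gr.Column():
            gr.Textbox(label="Tall options block", lines=10)

# demo.launch()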