Mirror of https://github.com/Paillat-dev/Botator.git, synced 2026-01-02 09:16:19 +00:00
✨ feat(server.ts): add support for the process.env.PORT environment variable so the app can run on a configurable port
🐛 fix(main.py): remove duplicate cog addition in main.py
✨ feat(main.py): add cogs.Help(bot) to the list of cogs in main.py
🐛 fix(main.py): remove redundant import statements in main.py
✨ feat(main.py): add on_guild_remove event handler in main.py
✨ feat(main.py): add on_guild_join event handler in main.py
✨ feat(main.py): add support for discord.Intents in main.py
✨ feat(main.py): add intents.message_content = True in main.py
✨ feat(main.py): add intents.default() in main.py
✨ feat(main.py): add discord.Bot(intents=intents, help_command=None) in main.py
✨ feat(main.py): add import statements in main.py
✨ feat(main.py): add from src.config import debug, discord_token in main.py
✨ feat(main.py): add import discord in main.py
✨ feat(main.py): add import src.config in main.py
✨ feat(main.py): add import src.cogs in main.py
✨ feat(main.py): add import src.cogs.chat in main.py
✨ feat(main.py): add import src.cogs.manage_chat in main.py
✨ feat(main.py): add import src.cogs.moderation in main.py
✨ feat(main.py): add import src.cogs.channelSetup in main.py
✨ feat(main.py): add import src.cogs.help in main.py
✨ feat(main.py): add import src.cogs.Chat in main.py
✨ feat(main.py): add import src.cogs.ManageChat in main.py
✨ feat(main.py): add import src.cogs.Moderation in main.py
✨ feat(main.py): add import src.cogs.ChannelSetup in main.py
✨ feat(main.py): add import src.cogs.Help in main.py
✨ feat(main.py): add
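For context, the main.py wiring this commit message describes looks roughly like the sketch below. It is a minimal reconstruction based only on the items above, using py-cord's discord.Bot API; the src.cogs package layout and the exact cog class names are assumptions, not code taken from the repository.

import discord

from src.config import debug, discord_token  # listed in the commit message; debug is unused in this sketch
from src import cogs  # assumed package layout; the real cog modules may differ

# Enable the message content intent so the bot can read messages.
intents = discord.Intents.default()
intents.message_content = True
bot = discord.Bot(intents=intents, help_command=None)  # constructor call as given in the commit message

# Register each cog exactly once (the commit removes a duplicate cog addition).
for cog in (
    cogs.Chat(bot),
    cogs.ManageChat(bot),
    cogs.Moderation(bot),
    cogs.ChannelSetup(bot),
    cogs.Help(bot),
):
    bot.add_cog(cog)

@bot.event
async def on_guild_join(guild):
    # Placeholder handler; the real logic lives in main.py.
    print(f"Joined guild: {guild.name}")

@bot.event
async def on_guild_remove(guild):
    print(f"Left guild: {guild.name}")

bot.run(discord_token)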
35 lines
1.1 KiB
Python
import asyncio

import replicate


class ReplicatePredictor:
    """Wraps a Replicate-hosted model so completions can be streamed without blocking the event loop."""

    def __init__(self, api_key, model_name, version_hash):
        self.api_key = api_key
        self.model_name = model_name
        self.version_hash = version_hash
        # Resolve the model and its version once so every prediction reuses them.
        self.client = replicate.Client(api_token=self.api_key)
        self.model = self.client.models.get(self.model_name)
        self.version = self.model.versions.get(self.version_hash)

    def prediction_thread(self, prompt, stop=None):
        # Blocking call: create the prediction and stream its output chunks.
        output = self.client.predictions.create(
            version=self.version,
            input={"prompt": prompt},
        )
        finaloutput = ""
        for out in output.output_iterator():
            finaloutput += out
            # Cancel the remote prediction as soon as the stop sequence appears.
            if stop is not None and stop in finaloutput:
                output.cancel()
        if stop is not None:
            # Drop everything after (and including) the stop sequence.
            return finaloutput.split(stop)[0]
        return finaloutput

    async def predict(self, prompt, stop=None):
        # Run the blocking prediction in the default executor so the asyncio loop stays responsive.
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(
            None, lambda: self.prediction_thread(prompt, stop)
        )
        return result
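For reference, a minimal usage sketch of this class; the module path, API token, model name, and version hash below are placeholders for illustration, not values taken from this repository.

import asyncio

from replicate_predictor import ReplicatePredictor  # hypothetical module path for the file above

async def main():
    predictor = ReplicatePredictor(
        api_key="r8_xxx",          # placeholder Replicate API token
        model_name="owner/model",  # placeholder model name
        version_hash="abc123",     # placeholder version hash
    )
    # Stream a completion off the event loop and cut it at the stop sequence.
    text = await predictor.predict("Write a one-line greeting.", stop="\n")
    print(text)

if __name__ == "__main__":
    asyncio.run(main())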