import toxicity as tox  # local module toxicity.py: scores messages via the Perspective API
classModeration(discord.Cog):
def __init__(self, bot: discord.Bot) -> None:
    """Initialize the moderation cog.

    Args:
        bot: The running :class:`discord.Bot` instance; stored so the
            cog's commands and listeners can access the client later.
    """
    super().__init__()
    self.bot = bot
@discord.slash_command(name="moderation",description="Enable or disable AI moderation & set the rules")
@discord.option(name="enable",description="Enable or disable AI moderation",reqired=True,)
@discord.option(name="log_channel",description="The channel where the moderation logs will be sent",required=True)
@discord.option(name="moderator_role",description="The role of the moderators",required=True)
#for each value we check if it's none. If it's none and there's no value in the database, we set it to 0.40, otherwise we set it to the value in the database
#we also do that with the manage_messages permission, so the moderators can't be moderated
ifmessage.author.guild_permissions.manage_messages:return#if the user is a moderator, we don't want to moderate him because he is allowed to say whatever he wants because he is just like a dictator
ifmessage.author.guild_permissions.administrator:return#if the user is an administrator, we don't want to moderate him because he is allowed to say whatever he wants because he is a DICTATOR
embed=discord.Embed(title="Message deleted",description=f"Your message was deleted because it was too toxic. The following reasons were found: **{'**, **'.join(reasons_to_delete)}**",color=discord.Color.red())
embed=discord.Embed(title="Message deleted",description=f"**{message.author}**'s message ***{content}*** was deleted because it was too toxic. The following reasons were found:",color=discord.Color.red())
awaitmessage.reply(f"{moderator_role.mention} This message might be toxic. The following reasons were found: **{'**, **'.join(reasons_to_suspicous)}**",delete_after=15,mention_author=False)
embed=discord.Embed(title="Message suspicious",description=f"**{message.author}**'s message [***{content}***]({message.jump_url}) might be toxic. The following reasons were found:",color=discord.Color.orange())
ifwould_have_been_deleted!=[]:embed=discord.Embed(title="Toxicity",description=f"Here are the different toxicity scores of the message\n***{message}***",color=discord.Color.red())
elifwould_have_been_suspicous!=[]andwould_have_been_deleted==[]:embed=discord.Embed(title="Toxicity",description=f"Here are the different toxicity scores of the message\n***{message}***",color=discord.Color.orange())
else:embed=discord.Embed(title="Toxicity",description=f"Here are the different toxicity scores of the message\n***{message}***",color=discord.Color.green())
ifwould_have_been_deleted!=[]:embed.add_field(name="Would have been deleted",value=f"Yes, the message would have been deleted because of the following toxicity scores: **{'**, **'.join(would_have_been_deleted)}**",inline=False)
ifwould_have_been_suspicous!=[]andwould_have_been_deleted==[]:embed.add_field(name="Would have been marked as suspicious",value=f"Yes, the message would have been marked as suspicious because of the following toxicity scores: {', '.join(would_have_been_suspicous)}",inline=False)
awaitctx.respond(embed=embed)
@discord.slash_command(name="moderation_help",description="Get help with the moderation AI")