@@ -22,7 +22,7 @@ TEMPERATURE = float(os.getenv('TEMPERATURE', "1.0"))
 # Initialise the asynchronous OpenAI client here
 openai_client = AsyncOpenAI(api_key=OPENAI_API_KEY, base_url=URL_OPENAI_API)
 
-BOT_VERSION = "2.5.3-penta"
+BOT_VERSION = "2.6.0-penta"
 
 # Check that the tokens and the personality prompt were retrieved
 if DISCORD_TOKEN is None or OPENAI_API_KEY is None or DISCORD_CHANNEL_ID is None:
@@ -67,14 +67,38 @@ def filter_message(message):
     elif len(message.split('</response>')) > 1:
         result = message.split('</response>')[1]
 
+        result = result.rstrip("</s>")
+
     return result
 
-def transorm_emote(message, output : bool):
+def transform_emote(message: str, output: bool) -> str:
     """Replace smileys with the corresponding Discord emote codes"""
 
-    result = message
+    list_emote = [
+        (":hap:", "<:hap:355854929073537026>"),
+        (":angryvault:", "<:angryvault:585550568806940672>"),
+        (":minou:", "<:minou:358054423462936576>"),
+        (":cetaitsur:", "<a:cetaitsur:826102032963469324>"),
+        (":eh:", "<:eh:395979132896280576>"),
+        (":desu:", "<:desu:388007643077410837>"),
+        (":bave2:", "<:bave2:412252920558387221>"),
+        (":haptriste:", "<:haptriste:358054014262181889>"),
+        (":perplexe:", "<:perplexe:358054891274371082>"),
+        (":sueur:", "<:sueur:358051940631838721>"),
+        (":chien:", "<:chien:507606737646518293>"),
+        (":kemar:", "<:kemar:419607012796792842>"),
+        (":ouch2:", "<:ouch2:777984650710745138>"),
+        (":coeur:", "<:coeur:355853389399195649>"),
+        (":what:", "<:what:587019571207077928>")
+    ]
+
+    for smiley, discord_code in list_emote:
+        if output:
+            message = message.replace(smiley, discord_code)
+        else:
+            message = message.replace(discord_code, smiley)
 
-    return result
+    return message
 
 def split_message(message, max_length=2000):
     """Split a message into segments of the specified maximum length."""
@@ -174,7 +198,7 @@ async def call_openai_api(user_text, user_name, detail='high'):
     message_to_send = {
         "role": "user",
         "content": [
-            {"type": "text", "text": f"{user_name} dit : {transorm_emote(user_text, False)}"}
+            {"type": "text", "text": f"{user_name} dit : {transform_emote(user_text, False)}"}
         ]
     }
 
@@ -292,10 +316,9 @@ async def on_message(message):
 
     if result:
         reply = result.choices[0].message.content
-        reply = reply.rstrip("</s>")
 
         reply = filter_message(reply)
-        reply = transorm_emote(reply, True)
+        reply = transform_emote(reply, True)
 
         message_parts = split_message(reply)
 