Mirror of https://github.com/yasirarism/MissKatyPyro.git, synced 2025-12-29 09:44:50 +00:00
Refactor chatbot ai plugins
Signed-off-by: Yasir Aris M <git@yasirdev.my.id>
This commit is contained in:
parent a4146c1157
commit 8be4aa8f14

1 changed file with 52 additions and 47 deletions
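For orientation: the diff below turns the DuckAI-only helper get_openai_stream_response(messages, bmsg) into a generic front end for any OpenAI-compatible endpoint (the streaming flag, API key, base URL and model are now passed in), and splits the per-user history cache into duckai_conversations and gemini_conversations. The following is a minimal standalone sketch of that dual-mode pattern, assuming the openai>=1.x async client; it is not the plugin's code, and the helper name fetch_answer is invented for illustration.

from openai import AsyncOpenAI


async def fetch_answer(is_stream, key, base_url, model, messages):
    # One client per call, pointed at whichever OpenAI-compatible endpoint
    # the caller selects (DuckAI proxy, Gemini proxy, ...).
    ai = AsyncOpenAI(api_key=key, base_url=base_url)
    if is_stream:
        # Streaming mode: chunks carry incremental text in choices[0].delta.content.
        stream = await ai.chat.completions.create(
            model=model, messages=messages, temperature=0.7, stream=True
        )
        answer = ""
        async for chunk in stream:
            if chunk.choices and chunk.choices[0].delta.content:
                answer += chunk.choices[0].delta.content
        return answer
    # Non-streaming mode: the full reply is in choices[0].message.content.
    response = await ai.chat.completions.create(
        model=model, messages=messages, temperature=0.7
    )
    return response.choices[0].message.content


# Hypothetical usage from async code, mirroring the two call sites in the diff:
# answer = await fetch_answer(True, OPENAI_KEY,
#     "https://duckai.yasirapi.eu.org/v1", "gpt-4o-mini", history)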
@@ -24,29 +24,42 @@ __HELP__ = """
 /ask - Generate text response from AI using OpenAI.
 """
 
-user_conversations = TTLCache(maxsize=4000, ttl=24*60*60)
+duckai_conversations = TTLCache(maxsize=4000, ttl=24*60*60)
+gemini_conversations = TTLCache(maxsize=4000, ttl=24*60*60)
 
-async def get_openai_stream_response(messages, bmsg):
-    ai = AsyncOpenAI(api_key=OPENAI_KEY, base_url="https://duckai.yasirapi.eu.org/v1")
-    response = await ai.chat.completions.create(
-        model="gpt-4o-mini",
-        messages=messages,
-        temperature=0.7,
-        stream=True,
-    )
+async def get_openai_stream_response(is_stream, key, base_url, model, messages, bmsg, strings):
+    ai = AsyncOpenAI(api_key=key, base_url=base_url)
+    if is_stream:
+        response = await ai.chat.completions.create(
+            model=model,
+            messages=messages,
+            temperature=0.7,
+            stream=True,
+        )
+    else:
+        response = await ai.chat.completions.create(
+            extra_body={"model":model},
+            model=model,
+            messages=messages,
+            temperature=0.7,
+        )
     answer = ""
     num = 0
     try:
-        async for chunk in response:
-            if not chunk.choices or not chunk.choices[0].delta.content:
-                continue
-            num += 1
-            answer += chunk.choices[0].delta.content
-            if num == 30:
-                await bmsg.edit_msg(html.escape(answer))
-                await asyncio.sleep(1.5)
-                num = 0
-        await bmsg.edit_msg(f"{html.escape(answer)}\n\n<b>Powered by:</b> <code>GPT 4o Mini</code>")
+        if is_stream:
+            await bmsg.edit_msg(f"{response.choices[0].message.content}\n\n<b>Powered by:</b> <code>Gemini 1.5 Flash</code>")
+            answer += response.choices[0].message.content
+        else:
+            async for chunk in response:
+                if not chunk.choices or not chunk.choices[0].delta.content:
+                    continue
+                num += 1
+                answer += chunk.choices[0].delta.content
+                if num == 30:
+                    await bmsg.edit_msg(html.escape(answer))
+                    await asyncio.sleep(1.5)
+                    num = 0
+            await bmsg.edit_msg(f"{html.escape(answer)}\n\n<b>Powered by:</b> <code>GPT 4o Mini</code>")
     except MessageTooLong:
         answerlink = await post_to_telegraph(
             False, "MissKaty ChatBot ", html.escape(f"<code>{answer}</code>")
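Both new history stores are TTLCache instances with the same parameters as the old one: at most 4000 users, entries expiring after 24 hours. A quick sketch of the behaviour the handlers rely on, assuming TTLCache comes from the cachetools package; the uid and messages are made up for illustration.

from cachetools import TTLCache

# Same parameters as in the diff: up to 4000 users, entries live for 24 hours.
conversations = TTLCache(maxsize=4000, ttl=24 * 60 * 60)

uid = 123456789  # hypothetical Telegram user id
conversations[uid] = [{"role": "system", "content": "You are MissKaty AI."}]
conversations[uid].append({"role": "user", "content": "Hello!"})

# TTLCache behaves like a dict, but a key silently disappears once its TTL
# elapses, which is why every handler first checks `uid not in conversations`
# before appending to an existing history.
print(uid in conversations)  # True now; False 24 hours after the entry was stored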
@@ -88,27 +101,19 @@ async def gemini_chatbot(_, ctx: Message, strings):
     )
     if not GOOGLEAI_KEY:
         return await ctx.reply_msg("GOOGLEAI_KEY env is missing!!!")
-
+    uid = ctx.from_user.id if ctx.from_user else ctx.sender_chat.id
     msg = await ctx.reply_msg(strings("find_answers_str"), quote=True)
-    try:
-        data = {"query": ctx.text.split(maxsplit=1)[1], "key": GOOGLEAI_KEY, "system_instructions": "Kamu adalah AI dengan karakter mirip kucing bernama MissKaty AI yang diciptakan oleh Yasir untuk membantu manusia mencari informasi."}
-        # Fetch from API beacuse my VPS is not supported
-        response = await fetch.post("https://yasirapi.eu.org/gemini", data=data)
-        if not response.json().get("candidates"):
-            await ctx.reply_msg(
-                "⚠️ Sorry, the prompt you sent maybe contains a forbidden word that is not permitted by AI."
-            )
-        else:
-            await ctx.reply_msg(
-                html.escape(
-                    response.json()["candidates"][0]["content"]["parts"][0]["text"]
-                )
-                + "\n<b>Powered by:</b> <code>Gemini Flash 1.5</code>"
-            )
-        await msg.delete()
-    except Exception as e:
-        await ctx.reply_msg(str(e))
-        await msg.delete()
+    if uid not in gemini_conversations:
+        gemini_conversations[uid] = [{"role": "system", "content": "Kamu adalah AI dengan karakter mirip kucing bernama MissKaty AI yang diciptakan oleh Yasir untuk membantu manusia mencari informasi."}, {"role": "user", "content": ctx.input}]
+    else:
+        gemini_conversations[uid].append({"role": "user", "content": ctx.input})
+    ai_response = await get_openai_stream_response(False, GOOGLEAI_KEY, "https://gemini.yasirapi.eu.org/v1", "gemini-1.5-flash", gemini_conversations[uid], msg, strings)
+    if not ai_response:
+        gemini_conversations[uid].pop()
+        if len(gemini_conversations[uid]) == 1:
+            gemini_conversations.pop(uid)
+        return
+    gemini_conversations[uid].append({"role": "assistant", "content": ai_response})
 
 
 @app.on_message(filters.command("ask", COMMAND_HANDLER) & pyro_cooldown.wait(10))
 @use_chat_lang()
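The new gemini_chatbot body also introduces a rollback rule for failed calls: when the backend returns an empty answer, the user turn that caused it is removed, and the whole cache entry is dropped once only the system prompt remains. A standalone restatement of that rule, for clarity only; the function name is invented and the plugin does this inline.

def rollback_failed_turn(conversations, uid):
    """Undo the last user message after an empty AI response."""
    conversations[uid].pop()          # drop the user turn that got no answer
    if len(conversations[uid]) == 1:  # only the system prompt is left
        conversations.pop(uid)        # forget the user; the next /ask starts fresh


# Mirrors the inline logic in the diff:
# if not ai_response:
#     rollback_failed_turn(gemini_conversations, uid)
#     return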
@@ -125,14 +130,14 @@ async def openai_chatbot(self, ctx: Message, strings):
         return await ctx.reply_msg(strings("dont_spam"), del_in=5)
     pertanyaan = ctx.input
     msg = await ctx.reply_msg(strings("find_answers_str"), quote=True)
-    if uid not in user_conversations:
-        user_conversations[uid] = [{"role": "user", "content": pertanyaan}]
+    if uid not in duckai_conversations:
+        duckai_conversations[uid] = [{"role": "system", "content": "Kamu adalah AI dengan karakter mirip kucing bernama MissKaty AI yang diciptakan oleh Yasir untuk membantu manusia mencari informasi."}, {"role": "user", "content": pertanyaan}]
     else:
-        user_conversations[uid].append({"role": "user", "content": pertanyaan})
-    ai_response = await get_openai_stream_response(user_conversations[uid], msg)
+        duckai_conversations[uid].append({"role": "user", "content": pertanyaan})
+    ai_response = await get_openai_stream_response(True, OPENAI_KEY, "https://duckai.yasirapi.eu.org/v1", "gpt-4o-mini", duckai_conversations[uid], msg, strings)
     if not ai_response:
-        user_conversations[user_id].pop()
-        if len(user_conversations[user_id]) == 1:
-            user_conversations.pop(user_id)
+        duckai_conversations[uid].pop()
+        if len(duckai_conversations[uid]) == 1:
+            duckai_conversations.pop(uid)
         return
-    user_conversations[uid].append({"role": "assistant", "content": ai_response})
+    duckai_conversations[uid].append({"role": "assistant", "content": ai_response})
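Finally, /ask now seeds each conversation with a system "persona" message instead of starting from the bare user prompt. The Indonesian system prompt translates roughly to: "You are an AI with a cat-like character named MissKaty AI, created by Yasir to help humans find information." A small illustrative sketch of how the cached history grows across turns; the example texts are invented and not from the commit.

# First /ask from a user: the history starts with the persona message.
history = [
    {"role": "system", "content": "You are MissKaty AI, a cat-like assistant created by Yasir."},
    {"role": "user", "content": "What can you do?"},
]

# After a successful call the assistant reply is appended, so a follow-up
# /ask within 24 hours (while the TTLCache entry is alive) keeps the context.
history.append({"role": "assistant", "content": "I can answer questions and look up information."})
history.append({"role": "user", "content": "Then summarise this chat."})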