Mirror of https://github.com/yasirarism/MissKatyPyro.git
Synced 2026-01-05 19:44:51 +00:00
Clean Up

parent 2923f3674f
commit e0ab4efa85

1 changed file with 9 additions and 19 deletions
@@ -4,18 +4,16 @@
 # * Copyright ©YasirPedia All rights reserved
 import asyncio
 import html
-import json
-import random
 
 from cachetools import TTLCache
 from openai import APIConnectionError, APIStatusError, AsyncOpenAI, RateLimitError
-from pyrogram import filters, utils
+from pyrogram import filters
 from pyrogram.errors import MessageTooLong
 from pyrogram.types import Message
 
 from misskaty import app
 from misskaty.core import pyro_cooldown
-from misskaty.helper import check_time_gap, fetch, post_to_telegraph, use_chat_lang
+from misskaty.helper import check_time_gap, post_to_telegraph, use_chat_lang
 from misskaty.vars import COMMAND_HANDLER, GOOGLEAI_KEY, OPENAI_KEY, SUDO
 
 __MODULE__ = "ChatBot"
@@ -29,25 +27,17 @@ gemini_conversations = TTLCache(maxsize=4000, ttl=24*60*60)
 
 async def get_openai_stream_response(is_stream, key, base_url, model, messages, bmsg, strings):
     ai = AsyncOpenAI(api_key=key, base_url=base_url)
-    if is_stream:
-        response = await ai.chat.completions.create(
-            model=model,
-            messages=messages,
-            temperature=0.7,
-            stream=True,
-        )
-    else:
-        response = await ai.chat.completions.create(
-            extra_body={"model":model},
-            model=model,
-            messages=messages,
-            temperature=0.7,
-        )
+    response = await ai.chat.completions.create(
+        model=model,
+        messages=messages,
+        temperature=0.7,
+        stream=is_stream,
+    )
     answer = ""
     num = 0
     try:
         if not is_stream:
-            await bmsg.edit_msg(f"{response.choices[0].message.content}\n<b>Powered by:</b> <code>Gemini 1.5 Flash</code>")
+            await bmsg.edit_msg(f"{html.escape(response.choices[0].message.content)}\n<b>Powered by:</b> <code>Gemini 1.5 Flash</code>")
             answer += response.choices[0].message.content
         else:
             async for chunk in response:
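The refactor above works because the OpenAI SDK's chat.completions.create() takes stream as an ordinary boolean: with stream=False it returns a complete ChatCompletion, with stream=True an async iterator of chunks. Passing stream=is_stream therefore lets a single call replace the duplicated if/else. A minimal sketch of both modes sharing one code path (the ask() helper and its arguments are hypothetical stand-ins for the module's real values):

from openai import AsyncOpenAI


async def ask(ai: AsyncOpenAI, model: str, messages: list, is_stream: bool) -> str:
    # One create() call serves both modes; the SDK picks its return type
    # from the `stream` flag.
    response = await ai.chat.completions.create(
        model=model,
        messages=messages,
        temperature=0.7,
        stream=is_stream,
    )
    if not is_stream:
        # Non-streaming: the whole answer arrives in one object.
        return response.choices[0].message.content
    # Streaming: accumulate incremental deltas, which may be None.
    answer = ""
    async for chunk in response:
        delta = chunk.choices[0].delta.content
        if delta:
            answer += delta
    return answer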
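The other functional change wraps the model's reply in html.escape() before bmsg.edit_msg(). The message is sent with HTML formatting (note the literal <b> and <code> tags in the same f-string), so an unescaped < or & in the model output could make Telegram reject the whole edit with an entity-parsing error. The standard-library html.escape() neutralizes those characters:

import html

raw = "compare List<int> & List<str>"
print(html.escape(raw))  # prints: compare List&lt;int&gt; &amp; List&lt;str&gt;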