commit e0ab4efa85
parent 2923f3674f
Author: Yasir Aris M
Committer: GitHub
Date: 2024-08-20 23:04:03 +07:00

@@ -4,18 +4,16 @@
 # * Copyright ©YasirPedia All rights reserved
 import asyncio
 import html
-import json
-import random
 from cachetools import TTLCache
 from openai import APIConnectionError, APIStatusError, AsyncOpenAI, RateLimitError
-from pyrogram import filters, utils
+from pyrogram import filters
 from pyrogram.errors import MessageTooLong
 from pyrogram.types import Message
 from misskaty import app
 from misskaty.core import pyro_cooldown
-from misskaty.helper import check_time_gap, fetch, post_to_telegraph, use_chat_lang
+from misskaty.helper import check_time_gap, post_to_telegraph, use_chat_lang
 from misskaty.vars import COMMAND_HANDLER, GOOGLEAI_KEY, OPENAI_KEY, SUDO
 __MODULE__ = "ChatBot"
@@ -29,25 +27,17 @@ gemini_conversations = TTLCache(maxsize=4000, ttl=24*60*60)
 async def get_openai_stream_response(is_stream, key, base_url, model, messages, bmsg, strings):
     ai = AsyncOpenAI(api_key=key, base_url=base_url)
-    if is_stream:
-        response = await ai.chat.completions.create(
-            model=model,
-            messages=messages,
-            temperature=0.7,
-            stream=True,
-        )
-    else:
-        response = await ai.chat.completions.create(
-            extra_body={"model":model},
-            model=model,
-            messages=messages,
-            temperature=0.7,
-        )
+    response = await ai.chat.completions.create(
+        model=model,
+        messages=messages,
+        temperature=0.7,
+        stream=is_stream,
+    )
     answer = ""
     num = 0
     try:
         if not is_stream:
-            await bmsg.edit_msg(f"{response.choices[0].message.content}\n<b>Powered by:</b> <code>Gemini 1.5 Flash</code>")
+            await bmsg.edit_msg(f"{html.escape(response.choices[0].message.content)}\n<b>Powered by:</b> <code>Gemini 1.5 Flash</code>")
             answer += response.choices[0].message.content
         else:
             async for chunk in response: