Mirror of https://github.com/yasirarism/MissKatyPyro.git, synced 2025-12-29 09:44:50 +00:00

Commit 45e38993f9 (parent 7711d032c5): Try to fix the google cmd and add minor fixes

8 changed files with 29 additions and 26 deletions
@@ -24,7 +24,7 @@ from misskaty.vars import (
 
 basicConfig(
     level=INFO,
-    format="[%(asctime)s - %(levelname)s] - %(name)s.%(funcName)s - %(message)s",
+    format="[%(levelname)s] - [%(asctime)s - %(name)s - %(message)s] -> [%(module)s:%(lineno)d]",
     datefmt="%d-%b-%y %H:%M:%S",
     handlers=[
         handlers.RotatingFileHandler("MissKatyLogs.txt", mode="w+", maxBytes=1000000),
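For reference, the reworked log format can be exercised on its own. The sketch below uses only the standard library and mirrors the file name, rotation size, and date format from the hunk above; the sample output line is only an approximation of what ends up in MissKatyLogs.txt.

# Standalone sketch of the logging setup from this diff (standard library only).
from logging import INFO, basicConfig, getLogger, handlers

basicConfig(
    level=INFO,
    format="[%(levelname)s] - [%(asctime)s - %(name)s - %(message)s] -> [%(module)s:%(lineno)d]",
    datefmt="%d-%b-%y %H:%M:%S",
    handlers=[
        handlers.RotatingFileHandler("MissKatyLogs.txt", mode="w+", maxBytes=1000000),
    ],
)

getLogger(__name__).info("bot started")
# MissKatyLogs.txt then contains lines roughly like:
# [INFO] - [29-Dec-25 09:44:50 - __main__ - bot started] -> [sketch:14]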
@@ -41,7 +41,7 @@ MOD_NOLOAD = ["subscene_dl"]
 HELPABLE = {}
 cleanmode = {}
 botStartTime = time.time()
-misskaty_version = "v2.10.13 - Stable"
+misskaty_version = "v2.10.14 - Stable"
 
 # Pyrogram Bot Client
 app = Client(
@@ -50,6 +50,10 @@ app = Client(
     api_hash=API_HASH,
     bot_token=BOT_TOKEN,
     mongodb=dict(connection=AsyncClient(DATABASE_URI), remove_peers=False),
+    sleep_threshold=180,
+    app_version="MissKatyPyro Stable",
+    max_concurrent_transmissions=5,
+    workers=50,
 )
 
 # Pyrogram UserBot Client
@@ -57,6 +61,7 @@ user = Client(
     "YasirUBot",
     session_string=USER_SESSION,
     mongodb=dict(connection=AsyncClient(DATABASE_URI), remove_peers=False),
+    sleep_threshold=180,
 )
 
 jobstores = {
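The two Client hunks mostly add tuning options. Below is a minimal sketch of a bot client using the same options; the credentials are placeholders, and the repo's mongodb= session storage (a kwarg provided by the Pyrogram fork this project uses) is left out so the example sticks to options available in stock Pyrogram.

# Minimal sketch of a Pyrogram bot client with the options added in this commit.
# api_id, api_hash and bot_token are placeholders; fill in real credentials.
from pyrogram import Client

app = Client(
    "MissKatyBot",
    api_id=12345,
    api_hash="0123456789abcdef0123456789abcdef",
    bot_token="123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11",
    sleep_threshold=180,             # auto-sleep on FloodWait errors up to 180 s
    app_version="MissKatyPyro Stable",
    max_concurrent_transmissions=5,  # parallel media uploads/downloads
    workers=50,                      # concurrent update handlers
)

if __name__ == "__main__":
    app.run()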
@@ -417,7 +417,7 @@ async def cmd_eval(self: Client, ctx: Message, strings) -> Optional[str]:
         "send": send,
         "stdout": out_buf,
         "traceback": traceback,
-        "http": fetch,
+        "fetch": fetch,
         "replied": ctx.reply_to_message,
         "requests": requests,
         "help": _help,
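The rename from "http" to "fetch" only changes the name the eval command exposes to evaluated snippets. The sketch below illustrates the underlying idea with plain exec(); the helper and namespace here are stand-ins, not the repo's implementation.

# Names placed in the namespace dict become usable inside the evaluated code,
# so snippets now call fetch(...) where they previously called http(...).
import traceback

def run_snippet(code: str, namespace: dict) -> None:
    try:
        exec(code, namespace)
    except Exception:
        print(traceback.format_exc())

namespace = {
    "fetch": lambda url: f"GET {url}",  # stand-in for the repo's HTTP helper
}
run_snippet("print(fetch('https://example.com'))", namespace)  # -> GET https://example.com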
@@ -482,7 +482,7 @@ async def cmd_eval(self: Client, ctx: Message, strings) -> Optional[str]:
     else:
         await edit_or_reply(
             ctx,
-            text=final_output,
+            text=f"<code>{final_output}</code>",
            parse_mode=enums.ParseMode.HTML,
             reply_markup=InlineKeyboardMarkup(
                 [
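Wrapping the eval output in <code> tags gives monospace rendering under HTML parse mode. As a general pattern (not something this hunk adds), arbitrary output placed inside HTML tags is usually run through html.escape first so stray <, > or & characters cannot break Telegram's HTML parsing:

# Illustrative only: escaping arbitrary output before embedding it in HTML markup.
import html

final_output = "<class 'int'> & friends"
text = f"<code>{html.escape(final_output)}</code>"
# -> "<code>&lt;class &#x27;int&#x27;&gt; &amp; friends</code>"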
@@ -145,15 +145,15 @@ async def genss(self: Client, ctx: Message, strings):
     dc_id = FileId.decode(media.file_id).dc_id
     try:
         dl = await replied.download(
-            file_name="/downloads/",
+            file_name="downloads/",
             progress=progress_for_pyrogram,
             progress_args=(strings("dl_progress"), process, c_time, dc_id),
         )
     except FileNotFoundError:
         return await process.edit_msg(
-            "ERROR: FileNotFound, maybe you're spam bot with same file."
+            "ERROR: FileNotFound."
         )
-    the_real_download_location = os.path.join("/downloads/", os.path.basename(dl))
+    the_real_download_location = os.path.join("downloads/", os.path.basename(dl))
     if the_real_download_location is not None:
         try:
             await process.edit_msg(
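The path edits swap the absolute /downloads/ directory (which sits at the filesystem root and usually does not exist there) for a downloads/ folder relative to the bot's working directory. A quick illustration of the difference, with a made-up file name:

# Demonstrates how the joined path differs between the old and new prefixes.
import os

dl = "downloads/My.Movie.2023.mkv"  # hypothetical value returned by .download()

print(os.path.join("/downloads/", os.path.basename(dl)))  # /downloads/My.Movie.2023.mkv (root-level)
print(os.path.join("downloads/", os.path.basename(dl)))   # downloads/My.Movie.2023.mkv (relative to cwd)
print(os.path.abspath("downloads/"))                      # <current working directory>/downloads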
@@ -2,6 +2,7 @@
 # * @date 2023-06-21 22:12:27
 # * @projectName MissKatyPyro
 # * Copyright ©YasirPedia All rights reserved
+import html
 import json
 import re
 import traceback
@@ -228,17 +229,15 @@ async def inline_menu(_, inline_query: InlineQuery):
         )
         soup = BeautifulSoup(search_results.text, "lxml")
         data = []
-        for result in soup.find_all("div", class_="kvH3mc BToiNc UK95Uc"):
-            link = result.find("div", class_="yuRUbf").find("a").get("href")
-            title = result.find("div", class_="yuRUbf").find("h3").get_text()
+        for result in soup.select(".tF2Cxc"):
+            link = result.select_one(".yuRUbf a")["href"]
+            title = result.select_one(".DKV0Md").text
             try:
-                snippet = result.find(
-                    "div", class_="VwiC3b yXK7lf MUxGbd yDYNvb lyLwlc lEBKkf"
-                ).get_text()
+                snippet = result.select_one("#rso .lyLwlc").text
             except:
                 snippet = "-"
             message_text = f"<a href='{link}'>{title}</a>\n"
-            message_text += f"Deskription: {snippet}"
+            message_text += f"Deskription: {html.escape(snippet)}"
             data.append(
                 InlineQueryResultArticle(
                     title=f"{title}",
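This is the heart of the google cmd fix: the long auto-generated class chains are replaced with shorter CSS selectors via select()/select_one(). The standalone sketch below reuses the same selectors against an already-fetched results page; Google's class names change periodically, so treat them as fragile, and the function wrapper here is only for illustration.

# Parse organic Google results with the selectors introduced in this commit.
# html_text is assumed to be the HTML of a results page fetched elsewhere (lxml installed).
import html
from bs4 import BeautifulSoup

def parse_results(html_text: str) -> list[dict]:
    soup = BeautifulSoup(html_text, "lxml")
    data = []
    for result in soup.select(".tF2Cxc"):             # one block per organic result
        link = result.select_one(".yuRUbf a")["href"]
        title = result.select_one(".DKV0Md").text
        try:
            snippet = result.select_one("#rso .lyLwlc").text
        except AttributeError:                        # no snippet block for this result
            snippet = "-"
        data.append(
            {
                "title": title,
                "link": link,
                "snippet": html.escape(snippet),      # safe to embed in HTML parse mode
            }
        )
    return data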
@@ -46,15 +46,15 @@ async def mediainfo(_, ctx: Message, strings):
     dc_id = FileId.decode(file_info.file_id).dc_id
     try:
         dl = await ctx.reply_to_message.download(
-            file_name="/downloads/",
+            file_name="downloads/",
             progress=progress_for_pyrogram,
             progress_args=(strings("dl_args_text"), process, c_time, dc_id),
         )
     except FileNotFoundError:
         return await process.edit_msg(
-            "ERROR: FileNotFound, maybe you're spam bot with same file."
+            "ERROR: FileNotFound."
         )
-    file_path = path.join("/downloads/", path.basename(dl))
+    file_path = path.join("downloads/", path.basename(dl))
     output_ = await runcmd(f'mediainfo "{file_path}"')
     out = output_[0] if len(output_) != 0 else None
     body_text = f"""
@@ -6,6 +6,7 @@
 """
 
 import asyncio
+import html
 import httpx
 import json
 import os
@@ -207,13 +208,11 @@ async def gsearch(_, message):
     # collect data
     data = []
 
-    for result in soup.find_all("div", class_="kvH3mc BToiNc UK95Uc"):
-        link = result.find("div", class_="yuRUbf").find("a").get("href")
-        title = result.find("div", class_="yuRUbf").find("h3").get_text()
+    for result in soup.select(".tF2Cxc"):
+        link = result.select_one(".yuRUbf a")["href"]
+        title = result.select_one(".DKV0Md").text
         try:
-            snippet = result.find(
-                "div", class_="VwiC3b yXK7lf MUxGbd yDYNvb lyLwlc lEBKkf"
-            ).get_text()
+            snippet = result.select_one("#rso .lyLwlc").text
         except:
             snippet = "-"
 
@@ -229,7 +228,7 @@ async def gsearch(_, message):
         parse = json.loads(arr)
         total = len(parse)
         res = "".join(
-            f"<a href='{i['link']}'>{i['title']}</a>\n{i['snippet']}\n\n" for i in parse
+            f"<a href='{i['link']}'>{i['title']}</a>\n{html.escape(i['snippet'])}\n\n" for i in parse
         )
     except Exception:
         exc = traceback.format_exc()
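A tiny demonstration of the updated message assembly: snippets are escaped before being interpolated into the HTML reply. The entry below is made up.

# Escaped snippets cannot inject or break HTML tags in the final message.
import html

parse = [
    {
        "title": "MissKatyPyro",
        "link": "https://github.com/yasirarism/MissKatyPyro",
        "snippet": "Telegram bot <beta> & docs",
    },
]
res = "".join(
    f"<a href='{i['link']}'>{i['title']}</a>\n{html.escape(i['snippet'])}\n\n" for i in parse
)
print(res)
# <a href='https://github.com/yasirarism/MissKatyPyro'>MissKatyPyro</a>
# Telegram bot &lt;beta&gt; &amp; docs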
@@ -218,7 +218,7 @@ async def ytdl_gendl_callback(self: Client, cq: CallbackQuery, strings):
     else:
         yt_url = True
         video_link = f"{YT_VID_URL}{match[1]}"
-
+    LOGGER.info(f"User {cq.from_user.id} using YTDL -> {video_link}")
     media_type = "video" if match[3] == "v" else "audio"
     uid, _ = ytdl.get_choice_by_id(match[2], media_type, yt_url=yt_url)
     key = await ytdl.download(
@@ -12,7 +12,7 @@ if os.path.exists("MissKatyLogs.txt"):
 
 basicConfig(
     level=INFO,
-    format="[%(asctime)s - %(levelname)s] - %(name)s.%(funcName)s - %(message)s",
+    format="[%(levelname)s] - [%(asctime)s - %(name)s - %(message)s] -> [%(module)s:%(lineno)d]",
    datefmt="%d-%b-%y %H:%M:%S",
     handlers=[
         handlers.RotatingFileHandler("MissKatyLogs.txt", mode="w+", maxBytes=1000000),