Fix httpx exception handling and use contextlib to handle some exceptions

Signed-off-by: Yasir Aris M <git@yasirdev.my.id>
Yasir Aris M 2023-10-18 11:26:06 +07:00
parent 6bff71a082
commit 298a076362
3 changed files with 701 additions and 676 deletions
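The core pattern of this commit: a try/except whose only purpose is to ignore an exception can be written with contextlib.suppress. A minimal, standalone sketch (not code from this repository), using os.remove as a stand-in for any call that may raise:

    import contextlib
    import os

    # Before: four lines to ignore one exception type.
    try:
        os.remove("cache.tmp")
    except FileNotFoundError:
        pass

    # After: suppress() swallows only the listed exception types
    # and lets everything else propagate.
    with contextlib.suppress(FileNotFoundError):
        os.remove("cache.tmp")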

File diff suppressed because it is too large


@@ -6,10 +6,12 @@
 """
 import asyncio
+import contextlib
 import html
 import json
 import os
 import re
+import sys
 import traceback
 from logging import getLogger
 from urllib.parse import quote
@@ -207,12 +209,13 @@ async def carbon_make(self: Client, ctx: Message):
         "code": text,
         "backgroundColor": "#1F816D",
     }
-    try:
-        response = await fetch.post(
-            "https://carbon.yasirapi.eu.org/api/cook", json=json_data, timeout=20
-        )
-    except httpx.HTTPError as exc:
-        return await ctx.reply_msg(f"HTTP Exception for {exc.request.url} - {exc}")
+    with contextlib.redirect_stdout(sys.stderr):
+        try:
+            response = await fetch.post(
+                "https://carbon.yasirapi.eu.org/api/cook", json=json_data, timeout=20
+            )
+        except httpx.HTTPError as exc:
+            return await ctx.reply_msg(f"HTTP Exception for {exc.request.url} - {exc}")
     if response.status_code != 200:
         return await ctx.reply_photo(
             f"https://http.cat/{response.status_code}",
@@ -561,17 +564,13 @@ async def who_is(client, message):
 async def close_callback(_, query: CallbackQuery):
     _, userid = query.data.split("#")
     if query.from_user.id != int(userid):
-        try:
-            return await query.answer("⚠️ Access Denied!", True)
-        except QueryIdInvalid:
-            return
-    try:
+        with contextlib.suppress(QueryIdInvalid):
+            return await query.answer("⚠️ Access Denied!", True)
+    with contextlib.suppress(Exception):
         await query.answer("Deleting this message in 5 seconds.")
         await asyncio.sleep(5)
         await query.message.delete()
         await query.message.reply_to_message.delete()
-    except:
-        pass

 async def mdlapi(title):


@@ -4,9 +4,11 @@
  * @projectName MissKatyPyro
  * Copyright @YasirPedia All rights reserved
 """
+import contextlib
 import httpx
 import logging
 import re
+import sys
 import traceback
 import cloudscraper
@@ -71,15 +73,16 @@ def split_arr(arr, size: 5):
 # Terbit21 GetData
 async def getDataTerbit21(msg, kueri, CurrentPage, strings):
     if not SCRAP_DICT.get(msg.id):
-        try:
-            if kueri:
-                terbitjson = await fetch.get(f"{web['yasirapi']}/terbit21?q={kueri}")
-            else:
-                terbitjson = await fetch.get(f"{web['yasirapi']}/terbit21")
-            terbitjson.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
-            return None, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                if kueri:
+                    terbitjson = await fetch.get(f"{web['yasirapi']}/terbit21?q={kueri}")
+                else:
+                    terbitjson = await fetch.get(f"{web['yasirapi']}/terbit21")
+                terbitjson.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
+                return None, None
         res = terbitjson.json()
         if not res.get("result"):
             await msg.edit_msg(strings("no_result"), del_in=5)
@@ -108,15 +111,16 @@ async def getDataTerbit21(msg, kueri, CurrentPage, strings):
 # LK21 GetData
 async def getDatalk21(msg, kueri, CurrentPage, strings):
     if not SCRAP_DICT.get(msg.id):
-        try:
-            if kueri:
-                lk21json = await fetch.get(f"{web['yasirapi']}/lk21?q={kueri}")
-            else:
-                lk21json = await fetch.get(f"{web['yasirapi']}/lk21")
-            lk21json.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
-            return None, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                if kueri:
+                    lk21json = await fetch.get(f"{web['yasirapi']}/lk21?q={kueri}")
+                else:
+                    lk21json = await fetch.get(f"{web['yasirapi']}/lk21")
+                lk21json.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
+                return None, None
         res = lk21json.json()
         if not res.get("result"):
             await msg.edit_msg(strings("no_result"), del_in=5)
@@ -143,15 +147,16 @@ async def getDatalk21(msg, kueri, CurrentPage, strings):
 # Pahe GetData
 async def getDataPahe(msg, kueri, CurrentPage, strings):
     if not SCRAP_DICT.get(msg.id):
-        try:
-            if kueri:
-                pahejson = await fetch.get(f"{web['yasirapi']}/pahe?q={kueri}")
-            else:
-                pahejson = await fetch.get(f"{web['yasirapi']}/pahe")
-            pahejson.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
-            return None, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                if kueri:
+                    pahejson = await fetch.get(f"{web['yasirapi']}/pahe?q={kueri}")
+                else:
+                    pahejson = await fetch.get(f"{web['yasirapi']}/pahe")
+                pahejson.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
+                return None, None
         res = pahejson.json()
         if not res.get("result"):
             await msg.edit_msg(strings("no_result"), del_in=5)
@@ -175,14 +180,15 @@ async def getDataPahe(msg, kueri, CurrentPage, strings):
 async def getDataKuso(msg, kueri, CurrentPage, user, strings):
     if not SCRAP_DICT.get(msg.id):
         kusodata = []
-        try:
-            data = await fetch.get(
-                f"{web['kusonime']}/?s={kueri}", follow_redirects=True
-            )
-            data.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
-            return None, 0, None, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                data = await fetch.get(
+                    f"{web['kusonime']}/?s={kueri}", follow_redirects=True
+                )
+                data.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
+                return None, 0, None, None
         res = BeautifulSoup(data, "lxml").find_all("h2", {"class": "episodeye"})
         for i in res:
             ress = i.find_all("a")[0]
@@ -224,14 +230,15 @@ async def getDataKuso(msg, kueri, CurrentPage, user, strings):
 async def getDataMovieku(msg, kueri, CurrentPage, strings):
     if not SCRAP_DICT.get(msg.id):
         moviekudata = []
-        try:
-            data = await fetch.get(
-                f"{web['movieku']}/?s={kueri}", follow_redirects=True
-            )
-            data.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
-            return None, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                data = await fetch.get(
+                    f"{web['movieku']}/?s={kueri}", follow_redirects=True
+                )
+                data.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>")
+                return None, None
         r = BeautifulSoup(data, "lxml")
         res = r.find_all(class_="bx")
         for i in res:
@@ -261,14 +268,15 @@ async def getDataMovieku(msg, kueri, CurrentPage, strings):
 async def getDataNodrakor(msg, kueri, CurrentPage, user, strings):
     if not SCRAP_DICT.get(msg.id):
         nodrakordata = []
-        try:
-            data = await fetch.get(
-                f"{web['nodrakor']}/?s={kueri}", follow_redirects=True,
-            )
-            data.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
-            return None, 0, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                data = await fetch.get(
+                    f"{web['nodrakor']}/?s={kueri}", follow_redirects=True,
+                )
+                data.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
+                return None, 0, None
         text = BeautifulSoup(data, "lxml")
         entry = text.find_all(class_="entry-header")
         if entry[0].text.strip() == "Nothing Found":
@@ -308,14 +316,15 @@ async def getDataNodrakor(msg, kueri, CurrentPage, user, strings):
 async def getDataSavefilm21(msg, kueri, CurrentPage, user, strings):
     if not SCRAP_DICT.get(msg.id):
         sfdata = []
-        try:
-            data = await fetch.get(
-                f"{web['savefilm21']}/?s={kueri}", follow_redirects=True,
-            )
-            data.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
-            return None, 0, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                data = await fetch.get(
+                    f"{web['savefilm21']}/?s={kueri}", follow_redirects=True,
+                )
+                data.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
+                return None, 0, None
         text = BeautifulSoup(data, "lxml")
         entry = text.find_all(class_="entry-header")
         if "Tidak Ditemukan" in entry[0].text:
@@ -354,17 +363,18 @@ async def getDataSavefilm21(msg, kueri, CurrentPage, user, strings):
 # Lendrive GetData
 async def getDataLendrive(msg, kueri, CurrentPage, user, strings):
     if not SCRAP_DICT.get(msg.id):
-        try:
-            if kueri:
-                data = await fetch.get(
-                    f"{web['lendrive']}/?s={kueri}", follow_redirects=True,
-                )
-            else:
-                data = await fetch.get(web["lendrive"], follow_redirects=True)
-            data.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
-            return None, 0, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                if kueri:
+                    data = await fetch.get(
+                        f"{web['lendrive']}/?s={kueri}", follow_redirects=True,
+                    )
+                else:
+                    data = await fetch.get(web["lendrive"], follow_redirects=True)
+                data.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
+                return None, 0, None
         res = BeautifulSoup(data, "lxml")
         lenddata = []
         for o in res.find_all(class_="bsx"):
@@ -409,14 +419,15 @@ async def getDataLendrive(msg, kueri, CurrentPage, user, strings):
 # MelongMovie GetData
 async def getDataMelong(msg, kueri, CurrentPage, user, strings):
     if not SCRAP_DICT.get(msg.id):
-        try:
-            data = await fetch.get(
-                f"{web['melongmovie']}/?s={kueri}", follow_redirects=True,
-            )
-            data.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
-            return None, 0, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                data = await fetch.get(
+                    f"{web['melongmovie']}/?s={kueri}", follow_redirects=True,
+                )
+                data.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
+                return None, 0, None
         bs4 = BeautifulSoup(data, "lxml")
         melongdata = []
         for res in bs4.select(".box"):
@@ -454,14 +465,15 @@ async def getDataMelong(msg, kueri, CurrentPage, user, strings):
 # GoMov GetData
 async def getDataGomov(msg, kueri, CurrentPage, user, strings):
     if not SCRAP_DICT.get(msg.id):
-        try:
-            gomovv = await fetch.get(
-                f"{web['gomov']}/?s={kueri}", follow_redirects=True
-            )
-            gomovv.raise_for_status()
-        except httpx.HTTPError as exc:
-            await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
-            return None, 0, None
+        with contextlib.redirect_stdout(sys.stderr):
+            try:
+                gomovv = await fetch.get(
+                    f"{web['gomov']}/?s={kueri}", follow_redirects=True
+                )
+                gomovv.raise_for_status()
+            except httpx.HTTPError as exc:
+                await msg.edit_msg(f"ERROR: Failed to fetch data from {exc.request.url} - <code>{exc}</code>", disable_web_page_preview=True)
+                return None, 0, None
         text = BeautifulSoup(gomovv, "lxml")
         entry = text.find_all(class_="entry-header")
         if entry[0].text.strip() == "Tidak Ditemukan":
@@ -1310,19 +1322,20 @@ async def savefilm21_scrap(_, callback_query, strings):
         ),
         InlineButton(strings("cl_btn"), f"close#{callback_query.from_user.id}"),
     )
-    try:
-        html = await fetch.get(link)
-        html.raise_for_status()
-        soup = BeautifulSoup(html.text, "lxml")
-        res = soup.find_all(class_="button button-shadow")
-        res = "".join(f"{i.text}\n{i['href']}\n\n" for i in res)
-        await callback_query.message.edit_msg(
-            strings("res_scrape").format(link=link, kl=res), reply_markup=keyboard
-        )
-    except httpx.HTTPError as exc:
-        await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
-    except Exception as err:
-        await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)
+    with contextlib.redirect_stdout(sys.stderr):
+        try:
+            html = await fetch.get(link)
+            html.raise_for_status()
+            soup = BeautifulSoup(html.text, "lxml")
+            res = soup.find_all(class_="button button-shadow")
+            res = "".join(f"{i.text}\n{i['href']}\n\n" for i in res)
+            await callback_query.message.edit_msg(
+                strings("res_scrape").format(link=link, kl=res), reply_markup=keyboard
+            )
+        except httpx.HTTPError as exc:
+            await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
+        except Exception as err:
+            await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)

 # NoDrakor DDL
@@ -1349,64 +1362,66 @@ async def nodrakorddl_scrap(_, callback_query, strings):
         ),
         InlineButton(strings("cl_btn"), f"close#{callback_query.from_user.id}"),
     )
-    try:
-        html = await fetch.get(link)
-        html.raise_for_status()
-        soup = BeautifulSoup(html.text, "lxml")
-        if "/tv/" in link:
-            result = soup.find("div", {"entry-content entry-content-single"}).find_all("p")
-            msg = ""
-            for i in result:
-                msg += str(f"{i}\n")
-            link = await post_to_telegraph(False, "MissKaty NoDrakor", msg)
-            return await callback_query.message.edit_msg(
-                strings("res_scrape").format(link=link, kl=link), reply_markup=keyboard
-            )
-        res = soup.find_all(class_="button button-shadow")
-        res = "".join(f"{i.text}\n{i['href']}\n\n" for i in res)
-        if len(res) > 3500:
-            link = await post_to_telegraph(False, "MissKaty NoDrakor", res)
-            return await callback_query.message.edit_msg(
-                strings("res_scrape").format(link=link, kl=link), reply_markup=keyboard
-            )
-        await callback_query.message.edit_msg(
-            strings("res_scrape").format(link=link, kl=res), reply_markup=keyboard
-        )
-    except httpx.HTTPError as exc:
-        await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
-    except Exception as err:
-        await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)
+    with contextlib.redirect_stdout(sys.stderr):
+        try:
+            html = await fetch.get(link)
+            html.raise_for_status()
+            soup = BeautifulSoup(html.text, "lxml")
+            if "/tv/" in link:
+                result = soup.find("div", {"entry-content entry-content-single"}).find_all("p")
+                msg = ""
+                for i in result:
+                    msg += str(f"{i}\n")
+                link = await post_to_telegraph(False, "MissKaty NoDrakor", msg)
+                return await callback_query.message.edit_msg(
+                    strings("res_scrape").format(link=link, kl=link), reply_markup=keyboard
+                )
+            res = soup.find_all(class_="button button-shadow")
+            res = "".join(f"{i.text}\n{i['href']}\n\n" for i in res)
+            if len(res) > 3500:
+                link = await post_to_telegraph(False, "MissKaty NoDrakor", res)
+                return await callback_query.message.edit_msg(
+                    strings("res_scrape").format(link=link, kl=link), reply_markup=keyboard
+                )
+            await callback_query.message.edit_msg(
+                strings("res_scrape").format(link=link, kl=res), reply_markup=keyboard
+            )
+        except httpx.HTTPError as exc:
+            await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
+        except Exception as err:
+            await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)
 # Scrape Link Download Movieku.CC
 @app.on_cmd("movieku_scrap")
 @use_chat_lang()
 async def muviku_scrap(_, message, strings):
-    try:
-        link = message.text.split(maxsplit=1)[1]
-        html = await fetch.get(link)
-        html.raise_for_status()
-        soup = BeautifulSoup(html.text, "lxml")
-        res = soup.find_all(class_="smokeurl")
-        data = []
-        for i in res:
-            for b in range(len(i.find_all("a"))):
-                link = i.find_all("a")[b]["href"]
-                kualitas = i.find_all("a")[b].text
-                # print(f"{kualitas}\n{link
-                data.append({"link": link, "kualitas": kualitas})
-        if not data:
-            return await message.reply(strings("no_result"))
-        res = "".join(f"<b>Host: {i['kualitas']}</b>\n{i['link']}\n\n" for i in data)
-        await message.reply(res)
-    except IndexError:
-        return await message.reply(
-            strings("invalid_cmd_scrape").format(cmd=message.command[0])
-        )
-    except httpx.HTTPError as exc:
-        await message.reply(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>")
-    except Exception as e:
-        await message.reply(f"ERROR: {str(e)}")
+    with contextlib.redirect_stdout(sys.stderr):
+        try:
+            link = message.text.split(maxsplit=1)[1]
+            html = await fetch.get(link)
+            html.raise_for_status()
+            soup = BeautifulSoup(html.text, "lxml")
+            res = soup.find_all(class_="smokeurl")
+            data = []
+            for i in res:
+                for b in range(len(i.find_all("a"))):
+                    link = i.find_all("a")[b]["href"]
+                    kualitas = i.find_all("a")[b].text
+                    # print(f"{kualitas}\n{link
+                    data.append({"link": link, "kualitas": kualitas})
+            if not data:
+                return await message.reply(strings("no_result"))
+            res = "".join(f"<b>Host: {i['kualitas']}</b>\n{i['link']}\n\n" for i in data)
+            await message.reply(res)
+        except IndexError:
+            return await message.reply(
+                strings("invalid_cmd_scrape").format(cmd=message.command[0])
+            )
+        except httpx.HTTPError as exc:
+            await message.reply(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>")
+        except Exception as e:
+            await message.reply(f"ERROR: {str(e)}")

 # Scrape DDL Link Melongmovie
@@ -1433,22 +1448,23 @@ async def melong_scrap(_, callback_query, strings):
         ),
         InlineButton(strings("cl_btn"), f"close#{callback_query.from_user.id}"),
     )
-    try:
-        html = await fetch.get(link)
-        html.raise_for_status()
-        soup = BeautifulSoup(html.text, "lxml")
-        rep = ""
-        for ep in soup.findAll(text=re.compile(r"(?i)episode\s+\d+|LINK DOWNLOAD")):
-            hardsub = ep.findPrevious("div")
-            softsub = ep.findNext("div")
-            rep += f"{hardsub}\n{softsub}"
-        await callback_query.message.edit_msg(
-            strings("res_scrape").format(link=link, kl=rep), reply_markup=keyboard
-        )
-    except httpx.HTTPError as exc:
-        await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
-    except Exception as err:
-        await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)
+    with contextlib.redirect_stdout(sys.stderr):
+        try:
+            html = await fetch.get(link)
+            html.raise_for_status()
+            soup = BeautifulSoup(html.text, "lxml")
+            rep = ""
+            for ep in soup.findAll(text=re.compile(r"(?i)episode\s+\d+|LINK DOWNLOAD")):
+                hardsub = ep.findPrevious("div")
+                softsub = ep.findNext("div")
+                rep += f"{hardsub}\n{softsub}"
+            await callback_query.message.edit_msg(
+                strings("res_scrape").format(link=link, kl=rep), reply_markup=keyboard
+            )
+        except httpx.HTTPError as exc:
+            await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
+        except Exception as err:
+            await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)

 # Scrape DDL Link Gomov
@@ -1475,23 +1491,24 @@ async def gomov_dl(_, callback_query, strings):
         ),
         InlineButton(strings("cl_btn"), f"close#{callback_query.from_user.id}"),
     )
-    try:
-        html = await fetch.get(link)
-        html.raise_for_status()
-        soup = BeautifulSoup(html.text, "lxml")
-        entry = soup.find(class_="gmr-download-wrap clearfix")
-        hasil = soup.find(class_="title-download").text
-        for i in entry.find(class_="list-inline gmr-download-list clearfix"):
-            title = i.find("a").text
-            ddl = i.find("a")["href"]
-            hasil += f"\n{title}\n{ddl}\n"
-        await callback_query.message.edit_msg(
-            strings("res_scrape").format(link=link, kl=hasil), reply_markup=keyboard
-        )
-    except httpx.HTTPError as exc:
-        await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
-    except Exception as err:
-        await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)
+    with contextlib.redirect_stdout(sys.stderr):
+        try:
+            html = await fetch.get(link)
+            html.raise_for_status()
+            soup = BeautifulSoup(html.text, "lxml")
+            entry = soup.find(class_="gmr-download-wrap clearfix")
+            hasil = soup.find(class_="title-download").text
+            for i in entry.find(class_="list-inline gmr-download-list clearfix"):
+                title = i.find("a").text
+                ddl = i.find("a")["href"]
+                hasil += f"\n{title}\n{ddl}\n"
+            await callback_query.message.edit_msg(
+                strings("res_scrape").format(link=link, kl=hasil), reply_markup=keyboard
+            )
+        except httpx.HTTPError as exc:
+            await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
+        except Exception as err:
+            await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)

 @app.on_cb("lendriveextract#")
@@ -1515,23 +1532,24 @@ async def lendrive_dl(_, callback_query, strings):
         ),
         InlineButton(strings("cl_btn"), f"close#{callback_query.from_user.id}"),
     )
-    try:
-        hmm = await fetch.get(link)
-        hmm.raise_for_status()
-        q = BeautifulSoup(hmm.text, "lxml")
-        j = q.findAll("div", class_="soraurlx")
-        kl = ""
-        for i in j:
-            if not i.find("a"):
-                continue
-            kl += f"{i.find('strong')}:\n"
-            kl += "".join(
-                f"[ <a href='{a.get('href')}'>{a.text}</a> ]\n" for a in i.findAll("a")
-            )
-        await callback_query.message.edit_msg(
-            strings("res_scrape").format(link=link, kl=kl), reply_markup=keyboard
-        )
-    except httpx.HTTPError as exc:
-        await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
-    except Exception as err:
-        await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)
+    with contextlib.redirect_stdout(sys.stderr):
+        try:
+            hmm = await fetch.get(link)
+            hmm.raise_for_status()
+            q = BeautifulSoup(hmm.text, "lxml")
+            j = q.findAll("div", class_="soraurlx")
+            kl = ""
+            for i in j:
+                if not i.find("a"):
+                    continue
+                kl += f"{i.find('strong')}:\n"
+                kl += "".join(
+                    f"[ <a href='{a.get('href')}'>{a.text}</a> ]\n" for a in i.findAll("a")
+                )
+            await callback_query.message.edit_msg(
+                strings("res_scrape").format(link=link, kl=kl), reply_markup=keyboard
+            )
+        except httpx.HTTPError as exc:
+            await callback_query.message.edit_msg(f"HTTP Exception for {exc.request.url} - <code>{exc}</code>", reply_markup=keyboard)
+        except Exception as err:
+            await callback_query.message.edit_msg(f"ERROR: {err}", reply_markup=keyboard)