From 2cd72cce507a73c6cc7fd1e13a62d82bc5844690 Mon Sep 17 00:00:00 2001
From: yasir
Date: Sun, 8 Jan 2023 20:16:15 +0700
Subject: [PATCH] Test update

---
 misskaty/__main__.py              |  4 +-
 misskaty/core/message_utils.py    | 34 +++++++++++++++--
 misskaty/helper/__init__.py       |  1 +
 misskaty/plugins/scrapwebsite.py  | 65 +++++++++++++++++++++++++++----
 misskaty/plugins/sub_extractor.py | 19 +++------
 5 files changed, 96 insertions(+), 27 deletions(-)

diff --git a/misskaty/__main__.py b/misskaty/__main__.py
index 01037da7..a3decdc2 100644
--- a/misskaty/__main__.py
+++ b/misskaty/__main__.py
@@ -19,8 +19,8 @@ from misskaty import (
     UBOT_USERNAME,
 )
 from misskaty.plugins import ALL_MODULES
-from misskaty.helper import paginate_modules
-from misskaty.helper.tools import bot_sys_stats
+from misskaty.helper import paginate_modules, bot_sys_stats
+from misskaty.core.message_utils import *
 from database.users_chats_db import db
 from misskaty.vars import LOG_CHANNEL, SUDO
 from utils import temp, auto_clean
diff --git a/misskaty/core/message_utils.py b/misskaty/core/message_utils.py
index 4b56fd00..552509f3 100644
--- a/misskaty/core/message_utils.py
+++ b/misskaty/core/message_utils.py
@@ -1,16 +1,44 @@
 import asyncio
 from logging import getLogger
+from pyrogram.errors import ChatWriteForbidden, MessageNotModified, FloodWait
 
 LOGGER = getLogger(__name__)
 
+# Wrappers for Telegram calls, so exceptions are handled here instead of in every handler.
 
-async def kirimPesan(msg, text: str, reply_markup=None):
+
+async def kirimPesan(msg, text: str, disable_web_page_preview=True, reply_markup=None):
     try:
-        return await msg.reply(text, disable_web_page_preview=True)
+        return await msg.reply(text, disable_web_page_preview=disable_web_page_preview, reply_markup=reply_markup)
     except FloodWait as e:
         LOGGER.warning(str(e))
         await asyncio.sleep(e.value)
-        return await kirimPesan(text)
+        return await kirimPesan(msg, text, disable_web_page_preview, reply_markup)
+    except ChatWriteForbidden:
+        return await msg.chat.leave()
     except Exception as e:
         LOGGER.error(str(e))
+
+
+async def editPesan(msg, text: str, disable_web_page_preview=True, reply_markup=None):
+    try:
+        return await msg.edit(text, disable_web_page_preview=disable_web_page_preview, reply_markup=reply_markup)
+    except FloodWait as e:
+        LOGGER.warning(str(e))
+        await asyncio.sleep(e.value)
+        return await editPesan(msg, text, disable_web_page_preview, reply_markup)
+    except MessageNotModified:
+        return
+    except Exception as e:
+        LOGGER.error(str(e))
+
+
+async def hapusPesan(msg):
+    try:
+        return await msg.delete()
+    except FloodWait as e:
+        LOGGER.warning(str(e))
+        await asyncio.sleep(e.value)
+        return await hapusPesan(msg)
+    except Exception as e:
+        LOGGER.error(str(e))
diff --git a/misskaty/helper/__init__.py b/misskaty/helper/__init__.py
index e20f7f0c..b350b85d 100644
--- a/misskaty/helper/__init__.py
+++ b/misskaty/helper/__init__.py
@@ -1 +1,2 @@
 from .misc import paginate_modules
+from .tools import bot_sys_stats
diff --git a/misskaty/plugins/scrapwebsite.py b/misskaty/plugins/scrapwebsite.py
index d953b4be..5b697d7a 100644
--- a/misskaty/plugins/scrapwebsite.py
+++ b/misskaty/plugins/scrapwebsite.py
@@ -26,7 +26,8 @@ __HELP__ = """
 /terbit21 [query ] - Scrape website data from Terbit21. If without query will give latest movie list.
 /savefilm21 [query ] - Scrape website data from Savefilm21. If without query will give latest movie list.
 /movieku [query ] - Scrape website data from Movieku.cc
-/nodrakor [query] - Scrape website data from nodrakor
+/nodrakor [query] - Scrape website data from nodrakor.icu
+/zonafilm [query] - Scrape website data from zonafilm.icu
 /gomov [query ] - Scrape website data from GoMov. If without query will give latest movie list.
""" @@ -37,6 +38,56 @@ headers = { } +@app.on_message(filters.command(["zonafilm"], COMMAND_HANDLER)) +@capture_err +async def zonafilm(_, msg): + m = await msg.reply("**__⏳ Please wait, scraping data ...__**", True) + try: + title = msg.text.split(" ", 1)[1] + except IndexError: + title = "" + try: + html = await http.get(f"http://173.212.199.27/?s={title}", headers=headers) + text = BeautifulSoup(html.text, "lxml") + entry = text.find_all(class_="entry-header") + if "Nothing Found" in entry[0].text: + await m.delete() + if title != "": + await msg.reply(f"404 Not FOUND For: {title}", True) + else: + await msg.reply(f"404 Not FOUND!", True) + return + data = [] + for i in entry: + genre = i.find(class_="gmr-movie-on").text + genre = f"{genre}" if genre != "" else "N/A" + judul = i.find(class_="entry-title").find("a").text + link = i.find(class_="entry-title").find("a").get("href") + data.append({"judul": judul, "link": link, "genre": genre}) + if title != "": + head = f"#Zonafilm Results For: {title}\n\n" + else: + head = f"#Zonafilm Latest:\n🌀 Use /{msg.command[0]} [title] to start search with title.\n\n" + msgs = "" + await m.delete() + for c, i in enumerate(data, start=1): + msgs += f"{c}. {i['judul']}\nGenre: {i['genre']}\nExtract: /{msg.command[0]}_scrap {i['link']}\n\n" + if len(head.encode("utf-8") + msgs.encode("utf-8")) >= 4000: + await msg.reply( + head + msgs, + True, + disable_web_page_preview=True, + ) + await asyncio.sleep(2) + msgs = "" + if msgs != "": + await msg.reply(head + msgs, True, disable_web_page_preview=True) + except Exception as e: + LOGGER.error(e) + await m.delete() + await msg.reply(f"ERROR: {e}", True) + + @app.on_message(filters.command(["nodrakor"], COMMAND_HANDLER)) @capture_err async def nodrakor(_, msg): @@ -197,7 +248,7 @@ async def movikucc(_, msg): @app.on_message(filters.command(["savefilm21"], COMMAND_HANDLER)) @capture_err async def savefilm21(_, msg): - SITE = "http://185.99.135.215" + SITE = "https://185.99.135.215" try: title = msg.text.split(" ", 1)[1] except: @@ -206,9 +257,7 @@ async def savefilm21(_, msg): data = [] try: if title is not None: - html = await http.get( - f"{SITE}/?s={title}", headers=headers, follow_redirects=False - ) + html = await http.get(f"{SITE}/?s={title}", headers=headers) bs4 = BeautifulSoup(html.text, "lxml") res = bs4.find_all(class_="entry-title") for i in res: @@ -239,7 +288,7 @@ async def savefilm21(_, msg): disable_web_page_preview=True, ) else: - html = await http.get(SITE, headers=headers, follow_redirects=False) + html = await http.get(SITE, headers=headers) bs4 = BeautifulSoup(html.text, "lxml") res = bs4.find_all(class_="entry-title") for i in res: @@ -549,7 +598,7 @@ async def savefilm21_scrap(_, message): "User-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 Edge/18.19582" } - html = await http.get(link, headers=headers, follow_redirects=False) + html = await http.get(link, headers=headers) soup = BeautifulSoup(html.text, "lxml") res = soup.find_all(class_="button button-shadow") res = "".join(f"{i.text}\n{i['href']}\n\n" for i in res) @@ -573,7 +622,7 @@ async def nodrakor_scrap(_, message): "User-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 Edge/18.19582" } - html = await http.get(link, headers=headers, follow_redirects=False) + html = await http.get(link, headers=headers) soup = BeautifulSoup(html.text, "lxml") hasil = 
     await message.reply(f"Hasil Scrap dari {link}:\n{hasil}")
diff --git a/misskaty/plugins/sub_extractor.py b/misskaty/plugins/sub_extractor.py
index 66788e25..168e825d 100644
--- a/misskaty/plugins/sub_extractor.py
+++ b/misskaty/plugins/sub_extractor.py
@@ -78,7 +78,7 @@ async def ceksub(_, m):
                 [
                     InlineKeyboardButton(
                         f"0:{mapping}({lang}): {stream_type}: {stream_name}",
-                        f"streamextract_{mapping}_{stream_name}",
+                        f"streamextract_0:{mapping}_{stream_name}",
                     )
                 ]
             )
@@ -119,7 +119,8 @@ async def convertsrt(c, m):
     )
     (await shell_exec(f"mediaextract -i '{dl}' '{filename}'.srt"))[0]
     await m.reply_document(
-        f"{filename}.srt", caption=f"{filename}.srt\n\nConverted by @{c.me.username}"
+        f"{filename}.srt",
+        caption=f"{filename}.srt\n\nConverted by @{c.me.username}",
     )
     await msg.delete()
     try:
@@ -148,24 +149,14 @@ async def stream_extract(bot, update):
         format = "mp3"
     elif codec == "eac3":
         format = "eac3"
-    elif codec == "subrip":
-        format = "srt"
-    elif codec == "ass":
-        format == "ass"
     else:
-        format = None
-    if not format:
-        return await update.answer(
-            "⚠️ Unsupported format, try extract manual using ffmpeg"
-        )
+        format = "srt"
     start_time = perf_counter()
     namafile = get_subname(link, format)
     LOGGER.info(
         f"ExtractSub: {namafile} by {update.from_user.first_name} [{update.from_user.id}]"
     )
-    extract = (await shell_exec(f"mediaextract -i {link} -map 0:{map} {namafile}"))[
-        0
-    ]
+    extract = (await shell_exec(f"mediaextract -i {link} -map {map} {namafile}"))[0]
     end_time = perf_counter()
     timelog = "{:.2f}".format(end_time - start_time) + " second"
     await update.message.reply_document(
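
Usage sketch (not part of the patch): the wrapper trio added to misskaty/core/message_utils.py centralizes FloodWait retries and common Pyrogram errors, so plugins no longer need a try/except around every Telegram call. A handler could use the helpers as below; the /ping command is hypothetical, and the import paths for app and COMMAND_HANDLER are assumptions based on the other plugins touched by this diff.

# Hypothetical plugin handler illustrating the new wrappers.
from pyrogram import filters

from misskaty import app
from misskaty.core.message_utils import editPesan, hapusPesan, kirimPesan
from misskaty.vars import COMMAND_HANDLER


@app.on_message(filters.command(["ping"], COMMAND_HANDLER))
async def ping(_, msg):
    # kirimPesan retries by itself after a FloodWait and leaves the chat
    # when writing is forbidden, so no try/except is needed here.
    sent = await kirimPesan(msg, "Pong...")
    # editPesan swallows MessageNotModified instead of raising it.
    await editPesan(sent, "Pong!")
    # hapusPesan deletes with the same FloodWait retry behaviour.
    await hapusPesan(sent)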