diff --git a/misskaty/plugins/web_scraper.py b/misskaty/plugins/web_scraper.py
index 08121127..5dbc84fa 100644
--- a/misskaty/plugins/web_scraper.py
+++ b/misskaty/plugins/web_scraper.py
@@ -117,6 +117,66 @@ async def getDataPahe(msg, kueri, CurrentPage):
except (IndexError, KeyError):
await msg.edit("Sorry could not find any matching results!")
+# Nodrakor GetData
+async def getDataNodrakor(msg, kueri, CurrentPage):
+ if not SCRAP_DICT.get(msg.id):
+ nodrakor = await http.get(f'https://zonafilm.icu/?s={kueri}', headers=headers)
+ text = BeautifulSoup(nodrakor.text, "lxml")
+ entry = text.find_all(class_="entry-header")
+ if "Nothing Found" in entry[0].text:
+ if not kueri:
+ return await msg.edit("404 Not FOUND!")
+ else:
+ return await msg.edit(f"404 Not FOUND For: {kueri}")
+ data = []
+ for i in entry:
+ genre = i.find(class_="gmr-movie-on").text
+ genre = f"{genre}" if genre != "" else "N/A"
+ judul = i.find(class_="entry-title").find("a").text
+ link = i.find(class_="entry-title").find("a").get("href")
+ data.append({"judul": judul, "link": link, "genre": genre})
+ SCRAP_DICT[msg.id] = [split_arr(data, 6), kueri]
+ try:
+ index = int(CurrentPage - 1)
+ PageLen = len(SCRAP_DICT[msg.id][0])
+
+    NodrakorResult = f"#Nodrakor Results For: {kueri}\n\n" if kueri else f"#Nodrakor Latest:\nš Use /nodrakor [title] to start search with title.\n\n"
+ for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+ NodrakorResult += f"{c}. {i['judul']}\nGenre: {i['genre']}\n"
+ NodrakorResult += f"Extract: /zonafilm_scrap {i['link']}\n\n" if "/tv/" not in i["link"] else "\n"
+ IGNORE_CHAR = "[]"
+ NodrakorResult = ''.join(i for i in NodrakorResult if not i in IGNORE_CHAR)
+ return NodrakorResult, PageLen
+ except (IndexError, KeyError):
+ await msg.edit("Sorry could not find any matching results!")
+
+# Movieku GetData
+async def getDataMovieku(msg, kueri, CurrentPage):
+ if not SCRAP_DICT.get(msg.id):
+ moviekudata = []
+ data = await http.get(f'https://107.152.37.223/?s={kueri}', headers=headers)
+ r = BeautifulSoup(data.text, "lxml")
+ res = r.find_all(class_="bx")
+ for i in res:
+ judul = i.find_all("a")[0]["title"]
+ link = i.find_all("a")[0]["href"]
+ moviekudata.append({"judul": judul, "link": link})
+ if not moviekudata:
+ return await msg.edit("Sorry could not find any results!")
+ SCRAP_DICT[msg.id] = [split_arr(moviekudata, 6), kueri]
+ try:
+ index = int(CurrentPage - 1)
+ PageLen = len(SCRAP_DICT[msg.id][0])
+
+ moviekuResult = f"#Movieku Latest:\nš Use /movieku [title] to start search with title.\n\n" if kueri == "" else f"#Movieku Results For: {kueri}\n\n"
+ for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+            moviekuResult += f"{c}. {i['judul']}\nExtract: /movieku_scrap {i['link']}\n\n"
+ IGNORE_CHAR = "[]"
+ moviekuResult = ''.join(i for i in moviekuResult if not i in IGNORE_CHAR)
+ return moviekuResult, PageLen
+ except (IndexError, KeyError):
+ await msg.edit("Sorry could not find any matching results!")
+
# Savefilm21 GetData
async def getDataSavefilm21(msg, kueri, CurrentPage):
if not SCRAP_DICT.get(msg.id):
@@ -136,9 +196,9 @@ async def getDataSavefilm21(msg, kueri, CurrentPage):
index = int(CurrentPage - 1)
PageLen = len(SCRAP_DICT[msg.id][0])
- sfResult = f"#SaveFilm21 Latest:\nš Use /savefilm21 [title] to start search with title.\n\n"
+ sfResult = f"#SaveFilm21 Latest:\nš Use /savefilm21 [title] to start search with title.\n\n" if kueri == "" else f"#Savefilm21 Results For: {kueri}\n\n"
for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
- sfResult += f"{c}. {i['judul']}\nQuality: {i['quality']}\nExtract: /melongmovie_scrap {i['link']}\n\n"
+ sfResult += f"{c}. {i['judul']}\nQuality: {i['quality']}\nExtract: /savefilm21_scrap {i['link']}\n\n"
IGNORE_CHAR = "[]"
sfResult = ''.join(i for i in sfResult if not i in IGNORE_CHAR)
return sfResult, PageLen
@@ -167,7 +227,7 @@ async def getDataMelong(msg, kueri, CurrentPage):
index = int(CurrentPage - 1)
PageLen = len(SCRAP_DICT[msg.id][0])
- melongResult = f"#MelongMovie Latest:\nš Use /melongmovie [title] to start search with title.\n\n"
+ melongResult = f"#MelongMovie Latest:\nš Use /melongmovie [title] to start search with title.\n\n" if kueri == "" else f"#MelongMovie Results For: {kueri}\n\n"
for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
melongResult += f"{c}. {i['judul']}\nQuality: {i['quality']}\nExtract: /melongmovie_scrap {i['link']}\n\n"
IGNORE_CHAR = "[]"
@@ -356,6 +416,38 @@ async def savefilm_s(client, message):
)
await editPesan(pesan, savefilmres, reply_markup=keyboard)
+# Nodrakor CMD
+@app.on_message(filters.command(['nodrakor'], COMMAND_HANDLER))
+async def nodrakor_s(client, message):
+ kueri = ' '.join(message.command[1:])
+ if not kueri:
+ kueri = ""
+ pesan = await message.reply("ā³ Please wait, scraping data from Nodrakor..")
+ CurrentPage = 1
+ nodrakorres, PageLen = await getDataNodrakor(pesan, kueri, CurrentPage)
+ keyboard = InlineKeyboard()
+ keyboard.paginate(PageLen, CurrentPage, 'page_nodrakor#{number}' + f'#{pesan.id}#{message.from_user.id}')
+ keyboard.row(
+ InlineButton("ā Close", f"close#{message.from_user.id}")
+ )
+ await editPesan(pesan, nodrakorres, reply_markup=keyboard)
+
+# Movieku CMD
+@app.on_message(filters.command(['movieku'], COMMAND_HANDLER))
+async def movieku_s(client, message):
+ kueri = ' '.join(message.command[1:])
+ if not kueri:
+ kueri = ""
+ pesan = await message.reply("ā³ Please wait, scraping data from Movieku..")
+ CurrentPage = 1
+ moviekures, PageLen = await getDataMovieku(pesan, kueri, CurrentPage)
+ keyboard = InlineKeyboard()
+    keyboard.paginate(PageLen, CurrentPage, 'page_movieku#{number}' + f'#{pesan.id}#{message.from_user.id}')
+ keyboard.row(
+ InlineButton("ā Close", f"close#{message.from_user.id}")
+ )
+ await editPesan(pesan, moviekures, reply_markup=keyboard)
+
# Savefillm21 Page Callback
@app.on_callback_query(filters.create(lambda _, __, query: 'page_savefilm#' in query.data))
async def savefilmpage_callback(client, callback_query):
@@ -380,6 +472,54 @@ async def savefilmpage_callback(client, callback_query):
)
await editPesan(callback_query.message, savefilmres, reply_markup=keyboard)
+# Nodrakor Page Callback
+@app.on_callback_query(filters.create(lambda _, __, query: 'page_nodrakor#' in query.data))
+async def nodrakorpage_callback(client, callback_query):
+ if callback_query.from_user.id != int(callback_query.data.split('#')[3]):
+ return await callback_query.answer("Not yours..", True)
+ message_id = int(callback_query.data.split('#')[2])
+ CurrentPage = int(callback_query.data.split('#')[1])
+ try:
+ kueri = SCRAP_DICT[message_id][1]
+ except KeyError:
+ return await callback_query.answer("Invalid callback data, please send CMD again..")
+
+    try:
+        nodrakorres, PageLen = await getDataNodrakor(callback_query.message, kueri, CurrentPage)
+    except TypeError:
+        return
+
+    keyboard = InlineKeyboard()
+    keyboard.paginate(PageLen, CurrentPage, 'page_nodrakor#{number}' + f'#{message_id}#{callback_query.from_user.id}')
+    keyboard.row(
+        InlineButton("ā Close", f"close#{callback_query.from_user.id}")
+    )
+    await editPesan(callback_query.message, nodrakorres, reply_markup=keyboard)
+
+# Movieku Page Callback
+@app.on_callback_query(filters.create(lambda _, __, query: 'page_movieku#' in query.data))
+async def moviekupage_callback(client, callback_query):
+ if callback_query.from_user.id != int(callback_query.data.split('#')[3]):
+ return await callback_query.answer("Not yours..", True)
+ message_id = int(callback_query.data.split('#')[2])
+ CurrentPage = int(callback_query.data.split('#')[1])
+ try:
+ kueri = SCRAP_DICT[message_id][1]
+ except KeyError:
+ return await callback_query.answer("Invalid callback data, please send CMD again..")
+
+ try:
+ moviekures, PageLen = await getDataMovieku(callback_query.message, kueri, CurrentPage)
+ except TypeError:
+ return
+
+ keyboard = InlineKeyboard()
+ keyboard.paginate(PageLen, CurrentPage, 'page_movieku#{number}' + f'#{message_id}#{callback_query.from_user.id}')
+ keyboard.row(
+ InlineButton("ā Close", f"close#{callback_query.from_user.id}")
+ )
+ await editPesan(callback_query.message, moviekures, reply_markup=keyboard)
+
# Terbit21 Page Callback
@app.on_callback_query(filters.create(lambda _, __, query: 'page_terbit21#' in query.data))
async def terbit21page_callback(client, callback_query):
@@ -516,7 +656,7 @@ async def zonafilmpage_callback(client, callback_query):
return
keyboard = InlineKeyboard()
- keyboard.paginate(PageLen, CurrentPage, 'page_gomov#{number}' + f'#{message_id}#{callback_query.from_user.id}')
+ keyboard.paginate(PageLen, CurrentPage, 'page_zonafilm#{number}' + f'#{message_id}#{callback_query.from_user.id}')
keyboard.row(
InlineButton("ā Close", f"close#{callback_query.from_user.id}")
)
@@ -639,216 +779,4 @@ async def gomov_zonafilm_dl(_, message):
except IndexError:
await message.reply(f"Gunakan command /{message.command[0]} [link] untuk scrap link download")
except Exception as err:
- await message.reply(f"ERROR: {err}")
-
-######## REWRITE SOON #################
-@app.on_message(filters.command(["nodrakor"], COMMAND_HANDLER))
-async def nodrakor(_, msg):
- m = await msg.reply("**__ā³ Please wait, scraping data ...__**", True)
- try:
- title = msg.text.split(" ", 1)[1]
- except IndexError:
- title = ""
- try:
- html = await http.get(f"http://173.212.199.27/?s={title}", headers=headers)
- text = BeautifulSoup(html.text, "lxml")
- entry = text.find_all(class_="entry-header")
- if "Nothing Found" in entry[0].text:
- await m.delete()
- if not title:
- await msg.reply("404 Not FOUND!", True)
- else:
- await msg.reply(f"404 Not FOUND For: {title}", True)
- return
- data = []
- for i in entry:
- genre = i.find(class_="gmr-movie-on").text
- genre = f"{genre[:-2]}" if genre != "" else "N/A"
- judul = i.find(class_="entry-title").find("a").text
- link = i.find(class_="entry-title").find("a").get("href")
- data.append({"judul": judul, "link": link, "genre": genre})
- head = f"#Nodrakor Results For: {title}\n\n" if title else f"#Nodrakor Latest:\nš Use /{msg.command[0]} [title] to start search with title.\n\n"
- msgs = ""
- await m.delete()
- for c, i in enumerate(data, start=1):
- msgs += f"{c}. {i['judul']}\nGenre: {i['genre']}\nExtract: /{msg.command[0]}_scrap {i['link']}\n\n"
- if len(head.encode("utf-8") + msgs.encode("utf-8")) >= 4000:
- await msg.reply(
- head + msgs,
- True,
- disable_web_page_preview=True,
- reply_markup=InlineKeyboardMarkup(
- [
- [
- InlineKeyboardButton(
- text="ā Close",
- callback_data=f"close#{msg.from_user.id}",
- )
- ]
- ]
- ),
- )
- await asyncio.sleep(2)
- msgs = ""
- if msgs != "":
- await msg.reply(head + msgs, True, disable_web_page_preview=True)
- except Exception as e:
- LOGGER.error(e)
- await m.delete()
- await msg.reply(f"ERROR: {e}", True)
-
-
-# Broken
-@app.on_message(filters.command(["ngefilm21"], COMMAND_HANDLER))
-async def ngefilm21(_, message):
- if len(message.command) == 1:
- return await message.reply("Masukkan query yang akan dicari..!!")
- title = message.text.split(" ", maxsplit=1)[1]
-
- msg = await message.reply("Sedang proses scrap, mohon tunggu..")
- try:
- html = await http.get(f"https://ngefilm.info/search?q={title}", headers=headers)
- soup = BeautifulSoup(html.text, "lxml")
- res = soup.find_all("h2")
- data = []
- for i in res:
- a = i.find_all("a")[0]
- judul = a.find_all(class_="r-snippetized")
- b = i.find_all("a")[0]["href"]
- data.append({"judul": judul[0].text, "link": b})
- if not data:
- return await msg.edit("Oops, data film tidak ditemukan.")
- res = "".join(f"{i['judul']}\n{i['link']}\n" for i in data)
- await msg.edit(
- f"Hasil Scrap dari Ngefilm21:\n{res}",
- reply_markup=InlineKeyboardMarkup(
- [
- [
- InlineKeyboardButton(
- text="ā Close",
- callback_data=f"close#{message.from_user.id}",
- )
- ]
- ]
- ),
- )
- except Exception as e:
- await msg.edit(f"ERROR: {str(e)}")
-
-
-# Scrape Web From Movieku.CC
-@app.on_message(filters.command(["movieku"], COMMAND_HANDLER))
-async def movikucc(_, msg):
- m = await msg.reply("**__ā³ Please wait, scraping data ...__**", True)
- data = []
- if len(msg.command) == 1:
- try:
- html = await http.get("https://107.152.37.223/")
- r = BeautifulSoup(html.text, "lxml")
- res = r.find_all(class_="bx")
- for i in res:
- judul = i.find_all("a")[0]["title"]
- link = i.find_all("a")[0]["href"]
- data.append({"judul": judul, "link": link})
- if not data:
- await m.delete()
- return await msg.reply("404 Result not FOUND!", True)
- await m.delete()
- head = f"#Movieku Latest:\nš Use /{msg.command[0]} [title] to start search with title.\n\n"
- msgs = ""
- for c, i in enumerate(data, start=1):
- msgs += f"{c}. {i['judul']}\nExtract: /{msg.command[0]}_scrap {i['link']}\n\n"
- if len(head.encode("utf-8") + msgs.encode("utf-8")) >= 4000:
- await msg.reply(
- head + msgs,
- True,
- disable_web_page_preview=True,
- reply_markup=InlineKeyboardMarkup(
- [
- [
- InlineKeyboardButton(
- text="ā Close",
- callback_data=f"close#{msg.from_user.id}",
- )
- ]
- ]
- ),
- )
- await asyncio.sleep(2)
- msgs = ""
- if msgs != "":
- await msg.reply(
- head + msgs,
- True,
- disable_web_page_preview=True,
- reply_markup=InlineKeyboardMarkup(
- [
- [
- InlineKeyboardButton(
- text="ā Close",
- callback_data=f"close#{msg.from_user.id}",
- )
- ]
- ]
- ),
- )
- except Exception as e:
- LOGGER.error(e)
- await m.delete()
- await msg.reply(f"ERROR: {e}", True)
- else:
- title = msg.text.split(" ", 1)[1]
- try:
- html = await http.get(f"https://107.152.37.223/?s={title}")
- r = BeautifulSoup(html.text, "lxml")
- res = r.find_all(class_="bx")
- for i in res:
- judul = i.find_all("a")[0]["title"]
- link = i.find_all("a")[0]["href"]
- data.append({"judul": judul, "link": link})
- if not data:
- await m.delete()
- return await msg.reply("404 Result not FOUND!", True)
- await m.delete()
- head = f"#Movieku Results For: {title}\n\n"
- msgs = ""
- for c, i in enumerate(data, start=1):
- msgs += f"{c}. {i['judul']}\nExtract: /{msg.command[0]}_scrap {i['link']}\n\n"
- if len(head.encode("utf-8") + msgs.encode("utf-8")) >= 4000:
- await msg.reply(
- head + msgs,
- True,
- disable_web_page_preview=True,
- reply_markup=InlineKeyboardMarkup(
- [
- [
- InlineKeyboardButton(
- text="ā Close",
- callback_data=f"close#{msg.from_user.id}",
- )
- ]
- ]
- ),
- )
- await asyncio.sleep(2)
- msgs = ""
- if msgs != "":
- await msg.reply(
- head + msgs,
- True,
- disable_web_page_preview=True,
- reply_markup=InlineKeyboardMarkup(
- [
- [
- InlineKeyboardButton(
- text="ā Close",
- callback_data=f"close#{msg.from_user.id}",
- )
- ]
- ]
- ),
- )
- except Exception as e:
- LOGGER.error(e)
- await m.delete()
- await msg.reply(f"ERROR: {e}", True)
\ No newline at end of file
+ await message.reply(f"ERROR: {err}")
\ No newline at end of file