yasir 2023-01-11 15:28:39 +07:00
parent 527c2ad3d7
commit 5dcd2cb52a


@@ -117,6 +117,66 @@ async def getDataPahe(msg, kueri, CurrentPage):
    except (IndexError, KeyError):
        await msg.edit("Sorry could not find any matching results!")
# Nodrakor GetData
async def getDataNodrakor(msg, kueri, CurrentPage):
    if not SCRAP_DICT.get(msg.id):
        nodrakor = await http.get(f'https://zonafilm.icu/?s={kueri}', headers=headers)
        text = BeautifulSoup(nodrakor.text, "lxml")
        entry = text.find_all(class_="entry-header")
        if "Nothing Found" in entry[0].text:
            if not kueri:
                return await msg.edit("404 Not FOUND!")
            else:
                return await msg.edit(f"404 Not FOUND For: {kueri}")
        data = []
        for i in entry:
            genre = i.find(class_="gmr-movie-on").text
            genre = f"{genre}" if genre != "" else "N/A"
            judul = i.find(class_="entry-title").find("a").text
            link = i.find(class_="entry-title").find("a").get("href")
            data.append({"judul": judul, "link": link, "genre": genre})
        SCRAP_DICT[msg.id] = [split_arr(data, 6), kueri]
    try:
        index = int(CurrentPage - 1)
        PageLen = len(SCRAP_DICT[msg.id][0])
        NodrakorResult = f"<b>#Nodrakor Results For:</b> <code>{kueri}</code>\n\n" if kueri else f"<b>#Nodrakor Latest:</b>\n🌀 Use /zonafilm [title] to start search with title.\n\n"
        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
            NodrakorResult += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> <code>{i['genre']}</code>\n"
            NodrakorResult += f"<b>Extract:</b> <code>/zonafilm_scrap {i['link']}</code>\n\n" if "/tv/" not in i["link"] else "\n"
        IGNORE_CHAR = "[]"
        NodrakorResult = ''.join(i for i in NodrakorResult if not i in IGNORE_CHAR)
        return NodrakorResult, PageLen
    except (IndexError, KeyError):
        await msg.edit("Sorry could not find any matching results!")
# Movieku GetData
async def getDataMovieku(msg, kueri, CurrentPage):
    if not SCRAP_DICT.get(msg.id):
        moviekudata = []
        data = await http.get(f'https://107.152.37.223/?s={kueri}', headers=headers)
        r = BeautifulSoup(data.text, "lxml")
        res = r.find_all(class_="bx")
        for i in res:
            judul = i.find_all("a")[0]["title"]
            link = i.find_all("a")[0]["href"]
            moviekudata.append({"judul": judul, "link": link})
        if not moviekudata:
            return await msg.edit("Sorry could not find any results!")
        SCRAP_DICT[msg.id] = [split_arr(moviekudata, 6), kueri]
    try:
        index = int(CurrentPage - 1)
        PageLen = len(SCRAP_DICT[msg.id][0])
        moviekuResult = f"<b>#Movieku Latest:</b>\n🌀 Use /movieku [title] to start search with title.\n\n" if kueri == "" else f"<b>#Movieku Results For:</b> <code>{kueri}</code>\n\n"
        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
            # The scraped entries only carry "judul" and "link", so the Quality field copied
            # from the melongmovie formatter would raise KeyError; point the extract hint at
            # the movieku scraper command instead of /melongmovie_scrap.
            moviekuResult += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Extract:</b> <code>/movieku_scrap {i['link']}</code>\n\n"
        IGNORE_CHAR = "[]"
        moviekuResult = ''.join(i for i in moviekuResult if not i in IGNORE_CHAR)
        return moviekuResult, PageLen
    except (IndexError, KeyError):
        await msg.edit("Sorry could not find any matching results!")
# Savefilm21 GetData
async def getDataSavefilm21(msg, kueri, CurrentPage):
    if not SCRAP_DICT.get(msg.id):
@@ -136,9 +196,9 @@ async def getDataSavefilm21(msg, kueri, CurrentPage):
        index = int(CurrentPage - 1)
        PageLen = len(SCRAP_DICT[msg.id][0])
-        sfResult = f"<b>#SaveFilm21 Latest:</b>\n🌀 Use /savefilm21 [title] to start search with title.\n\n"
+        sfResult = f"<b>#SaveFilm21 Latest:</b>\n🌀 Use /savefilm21 [title] to start search with title.\n\n" if kueri == "" else f"<b>#Savefilm21 Results For:</b> <code>{kueri}</code>\n\n"
        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            sfResult += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Quality:</b> {i['quality']}\n<b>Extract:</b> <code>/melongmovie_scrap {i['link']}</code>\n\n"
+            sfResult += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Quality:</b> {i['quality']}\n<b>Extract:</b> <code>/savefilm21_scrap {i['link']}</code>\n\n"
        IGNORE_CHAR = "[]"
        sfResult = ''.join(i for i in sfResult if not i in IGNORE_CHAR)
        return sfResult, PageLen
@@ -167,7 +227,7 @@ async def getDataMelong(msg, kueri, CurrentPage):
        index = int(CurrentPage - 1)
        PageLen = len(SCRAP_DICT[msg.id][0])
-        melongResult = f"<b>#MelongMovie Latest:</b>\n🌀 Use /melongmovie [title] to start search with title.\n\n"
+        melongResult = f"<b>#MelongMovie Latest:</b>\n🌀 Use /melongmovie [title] to start search with title.\n\n" if kueri == "" else f"<b>#MelongMovie Results For:</b> <code>{kueri}</code>\n\n"
        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
            melongResult += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Quality:</b> {i['quality']}\n<b>Extract:</b> <code>/melongmovie_scrap {i['link']}</code>\n\n"
        IGNORE_CHAR = "[]"
@@ -356,6 +416,38 @@ async def savefilm_s(client, message):
    )
    await editPesan(pesan, savefilmres, reply_markup=keyboard)
# Nodrakor CMD
@app.on_message(filters.command(['nodrakor'], COMMAND_HANDLER))
async def nodrakor_s(client, message):
    kueri = ' '.join(message.command[1:])
    if not kueri:
        kueri = ""
    pesan = await message.reply("⏳ Please wait, scraping data from Nodrakor..")
    CurrentPage = 1
    nodrakorres, PageLen = await getDataNodrakor(pesan, kueri, CurrentPage)
    keyboard = InlineKeyboard()
    keyboard.paginate(PageLen, CurrentPage, 'page_nodrakor#{number}' + f'#{pesan.id}#{message.from_user.id}')
    keyboard.row(
        InlineButton("❌ Close", f"close#{message.from_user.id}")
    )
    await editPesan(pesan, nodrakorres, reply_markup=keyboard)
# Movieku CMD
@app.on_message(filters.command(['movieku'], COMMAND_HANDLER))
async def movieku_s(client, message):
    kueri = ' '.join(message.command[1:])
    if not kueri:
        kueri = ""
    pesan = await message.reply("⏳ Please wait, scraping data from Movieku..")
    CurrentPage = 1
    moviekures, PageLen = await getDataMovieku(pesan, kueri, CurrentPage)
    keyboard = InlineKeyboard()
    # Paginate with the 'page_movieku#' prefix so taps reach moviekupage_callback below;
    # the copy-pasted 'page_savefilm#' prefix would route pages to the savefilm handler.
    keyboard.paginate(PageLen, CurrentPage, 'page_movieku#{number}' + f'#{pesan.id}#{message.from_user.id}')
    keyboard.row(
        InlineButton("❌ Close", f"close#{message.from_user.id}")
    )
    await editPesan(pesan, moviekures, reply_markup=keyboard)
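# The paginate() pattern used above (pykeyboard's InlineKeyboard) is assumed to substitute
# {number} per page button, so a tap delivers callback data shaped like:
#     page_movieku#<page>#<message_id>#<user_id>
# which the page callbacks below split on '#': index 1 is the page, index 2 the SCRAP_DICT
# key, index 3 the requesting user id. For example (illustrative values):
#     data = "page_movieku#2#12345#67890".split("#")
#     CurrentPage, message_id, user_id = int(data[1]), int(data[2]), int(data[3])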
# Savefilm21 Page Callback
@app.on_callback_query(filters.create(lambda _, __, query: 'page_savefilm#' in query.data))
async def savefilmpage_callback(client, callback_query):
@@ -380,6 +472,54 @@ async def savefilmpage_callback(client, callback_query):
    )
    await editPesan(callback_query.message, savefilmres, reply_markup=keyboard)
# Nodrakor Page Callback
@app.on_callback_query(filters.create(lambda _, __, query: 'page_nodrakor#' in query.data))
async def nodrakorpage_callback(client, callback_query):
    if callback_query.from_user.id != int(callback_query.data.split('#')[3]):
        return await callback_query.answer("Not yours..", True)
    message_id = int(callback_query.data.split('#')[2])
    CurrentPage = int(callback_query.data.split('#')[1])
    try:
        kueri = SCRAP_DICT[message_id][1]
    except KeyError:
        return await callback_query.answer("Invalid callback data, please send CMD again..")
    try:
        nodrakorres, PageLen = await getDataNodrakor(callback_query.message, kueri, CurrentPage)
    except TypeError:
        return
    keyboard = InlineKeyboard()
    keyboard.paginate(PageLen, CurrentPage, 'page_nodrakor#{number}' + f'#{message_id}#{callback_query.from_user.id}')
    keyboard.row(
        InlineButton("❌ Close", f"close#{callback_query.from_user.id}")
    )
    await editPesan(callback_query.message, nodrakorres, reply_markup=keyboard)
# Movieku Page Callback
@app.on_callback_query(filters.create(lambda _, __, query: 'page_movieku#' in query.data))
async def moviekupage_callback(client, callback_query):
    if callback_query.from_user.id != int(callback_query.data.split('#')[3]):
        return await callback_query.answer("Not yours..", True)
    message_id = int(callback_query.data.split('#')[2])
    CurrentPage = int(callback_query.data.split('#')[1])
    try:
        kueri = SCRAP_DICT[message_id][1]
    except KeyError:
        return await callback_query.answer("Invalid callback data, please send CMD again..")
    try:
        moviekures, PageLen = await getDataMovieku(callback_query.message, kueri, CurrentPage)
    except TypeError:
        return
    keyboard = InlineKeyboard()
    keyboard.paginate(PageLen, CurrentPage, 'page_movieku#{number}' + f'#{message_id}#{callback_query.from_user.id}')
    keyboard.row(
        InlineButton("❌ Close", f"close#{callback_query.from_user.id}")
    )
    await editPesan(callback_query.message, moviekures, reply_markup=keyboard)
# Terbit21 Page Callback
@app.on_callback_query(filters.create(lambda _, __, query: 'page_terbit21#' in query.data))
async def terbit21page_callback(client, callback_query):
@@ -516,7 +656,7 @@ async def zonafilmpage_callback(client, callback_query):
        return
    keyboard = InlineKeyboard()
-    keyboard.paginate(PageLen, CurrentPage, 'page_gomov#{number}' + f'#{message_id}#{callback_query.from_user.id}')
+    keyboard.paginate(PageLen, CurrentPage, 'page_zonafilm#{number}' + f'#{message_id}#{callback_query.from_user.id}')
    keyboard.row(
        InlineButton("❌ Close", f"close#{callback_query.from_user.id}")
    )
@@ -639,216 +779,4 @@ async def gomov_zonafilm_dl(_, message):
    except IndexError:
        await message.reply(f"Use the command /{message.command[0]} <b>[link]</b> to scrape the download links")
    except Exception as err:
        await message.reply(f"ERROR: {err}")
######## REWRITE SOON #################
@app.on_message(filters.command(["nodrakor"], COMMAND_HANDLER))
async def nodrakor(_, msg):
m = await msg.reply("**__⏳ Please wait, scraping data ...__**", True)
try:
title = msg.text.split(" ", 1)[1]
except IndexError:
title = ""
try:
html = await http.get(f"http://173.212.199.27/?s={title}", headers=headers)
text = BeautifulSoup(html.text, "lxml")
entry = text.find_all(class_="entry-header")
if "Nothing Found" in entry[0].text:
await m.delete()
if not title:
await msg.reply("404 Not FOUND!", True)
else:
await msg.reply(f"404 Not FOUND For: {title}", True)
return
data = []
for i in entry:
genre = i.find(class_="gmr-movie-on").text
genre = f"{genre[:-2]}" if genre != "" else "N/A"
judul = i.find(class_="entry-title").find("a").text
link = i.find(class_="entry-title").find("a").get("href")
data.append({"judul": judul, "link": link, "genre": genre})
head = f"<b>#Nodrakor Results For:</b> <code>{title}</code>\n\n" if title else f"<b>#Nodrakor Latest:</b>\n🌀 Use /{msg.command[0]} [title] to start search with title.\n\n"
msgs = ""
await m.delete()
for c, i in enumerate(data, start=1):
msgs += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> <code>{i['genre']}</code>\n<b>Extract:</b> <code>/{msg.command[0]}_scrap {i['link']}</code>\n\n"
if len(head.encode("utf-8") + msgs.encode("utf-8")) >= 4000:
await msg.reply(
head + msgs,
True,
disable_web_page_preview=True,
reply_markup=InlineKeyboardMarkup(
[
[
InlineKeyboardButton(
text="❌ Close",
callback_data=f"close#{msg.from_user.id}",
)
]
]
),
)
await asyncio.sleep(2)
msgs = ""
if msgs != "":
await msg.reply(head + msgs, True, disable_web_page_preview=True)
except Exception as e:
LOGGER.error(e)
await m.delete()
await msg.reply(f"ERROR: <code>{e}</code>", True)
# Broken
@app.on_message(filters.command(["ngefilm21"], COMMAND_HANDLER))
async def ngefilm21(_, message):
    if len(message.command) == 1:
        return await message.reply("Enter a query to search for..!!")
    title = message.text.split(" ", maxsplit=1)[1]
    msg = await message.reply("Scraping in progress, please wait..")
    try:
        html = await http.get(f"https://ngefilm.info/search?q={title}", headers=headers)
        soup = BeautifulSoup(html.text, "lxml")
        res = soup.find_all("h2")
        data = []
        for i in res:
            a = i.find_all("a")[0]
            judul = a.find_all(class_="r-snippetized")
            b = i.find_all("a")[0]["href"]
            data.append({"judul": judul[0].text, "link": b})
        if not data:
            return await msg.edit("Oops, no movie data was found.")
        res = "".join(f"<b>{i['judul']}</b>\n{i['link']}\n" for i in data)
        await msg.edit(
            f"<b>Scrape results from Ngefilm21:</b>\n{res}",
            reply_markup=InlineKeyboardMarkup(
                [
                    [
                        InlineKeyboardButton(
                            text="❌ Close",
                            callback_data=f"close#{message.from_user.id}",
                        )
                    ]
                ]
            ),
        )
    except Exception as e:
        await msg.edit(f"ERROR: {str(e)}")
# Scrape Web From Movieku.CC
@app.on_message(filters.command(["movieku"], COMMAND_HANDLER))
async def movikucc(_, msg):
    m = await msg.reply("**__⏳ Please wait, scraping data ...__**", True)
    data = []
    if len(msg.command) == 1:
        try:
            html = await http.get("https://107.152.37.223/")
            r = BeautifulSoup(html.text, "lxml")
            res = r.find_all(class_="bx")
            for i in res:
                judul = i.find_all("a")[0]["title"]
                link = i.find_all("a")[0]["href"]
                data.append({"judul": judul, "link": link})
            if not data:
                await m.delete()
                return await msg.reply("404 Result not FOUND!", True)
            await m.delete()
            head = f"<b>#Movieku Latest:</b>\n🌀 Use /{msg.command[0]} [title] to start search with title.\n\n"
            msgs = ""
            for c, i in enumerate(data, start=1):
                msgs += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Extract:</b> <code>/{msg.command[0]}_scrap {i['link']}</code>\n\n"
                if len(head.encode("utf-8") + msgs.encode("utf-8")) >= 4000:
                    await msg.reply(
                        head + msgs,
                        True,
                        disable_web_page_preview=True,
                        reply_markup=InlineKeyboardMarkup(
                            [
                                [
                                    InlineKeyboardButton(
                                        text="❌ Close",
                                        callback_data=f"close#{msg.from_user.id}",
                                    )
                                ]
                            ]
                        ),
                    )
                    await asyncio.sleep(2)
                    msgs = ""
            if msgs != "":
                await msg.reply(
                    head + msgs,
                    True,
                    disable_web_page_preview=True,
                    reply_markup=InlineKeyboardMarkup(
                        [
                            [
                                InlineKeyboardButton(
                                    text="❌ Close",
                                    callback_data=f"close#{msg.from_user.id}",
                                )
                            ]
                        ]
                    ),
                )
        except Exception as e:
            LOGGER.error(e)
            await m.delete()
            await msg.reply(f"ERROR: {e}", True)
    else:
        title = msg.text.split(" ", 1)[1]
        try:
            html = await http.get(f"https://107.152.37.223/?s={title}")
            r = BeautifulSoup(html.text, "lxml")
            res = r.find_all(class_="bx")
            for i in res:
                judul = i.find_all("a")[0]["title"]
                link = i.find_all("a")[0]["href"]
                data.append({"judul": judul, "link": link})
            if not data:
                await m.delete()
                return await msg.reply("404 Result not FOUND!", True)
            await m.delete()
            head = f"<b>#Movieku Results For:</b> <code>{title}</code>\n\n"
            msgs = ""
            for c, i in enumerate(data, start=1):
                msgs += f"<b>{c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Extract:</b> <code>/{msg.command[0]}_scrap {i['link']}</code>\n\n"
                if len(head.encode("utf-8") + msgs.encode("utf-8")) >= 4000:
                    await msg.reply(
                        head + msgs,
                        True,
                        disable_web_page_preview=True,
                        reply_markup=InlineKeyboardMarkup(
                            [
                                [
                                    InlineKeyboardButton(
                                        text="❌ Close",
                                        callback_data=f"close#{msg.from_user.id}",
                                    )
                                ]
                            ]
                        ),
                    )
                    await asyncio.sleep(2)
                    msgs = ""
            if msgs != "":
                await msg.reply(
                    head + msgs,
                    True,
                    disable_web_page_preview=True,
                    reply_markup=InlineKeyboardMarkup(
                        [
                            [
                                InlineKeyboardButton(
                                    text="❌ Close",
                                    callback_data=f"close#{msg.from_user.id}",
                                )
                            ]
                        ]
                    ),
                )
        except Exception as e:
            LOGGER.error(e)
            await m.delete()
            await msg.reply(f"ERROR: {e}", True)
await message.reply(f"ERROR: {err}")