Remove unused code in scraper

Signed-off-by: Yasir Aris M <git@yasirdev.my.id>
Yasir Aris M 2023-10-11 11:39:24 +07:00
parent 65de277af6
commit 3186e8820b


@@ -102,7 +102,6 @@ async def getDataTerbit21(msg, kueri, CurrentPage, strings):
             if re.search(r"Complete|Ongoing", i["kategori"])
             else f"<b><a href='{i['dl']}'>{strings('dl_text')}</a></b>\n\n"
         )
-    TerbitRes = "".join(i for i in TerbitRes if i not in "[]")
     return TerbitRes, PageLen
@@ -123,28 +122,22 @@ async def getDatalk21(msg, kueri, CurrentPage, strings):
         await msg.edit_msg(strings("no_result"), del_in=5)
         return None, None
     SCRAP_DICT.add(msg.id, [split_arr(res["result"], 6), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
-
-        if kueri:
-            lkResult = strings("header_with_query").format(
-                web="Layarkaca21", kueri=kueri
-            )
-        else:
-            lkResult = strings("header_no_query").format(web="Layarkaca21", cmd="lk21")
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            lkResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('cat_text')}:</b> <code>{i['kategori']}</code>\n"
-            lkResult += (
-                "\n"
-                if re.search(r"Complete|Ongoing", i["kategori"])
-                else f"<b><a href='{i['dl']}'>{strings('dl_text')}</a></b>\n\n"
-            )
-        lkResult = "".join(i for i in lkResult if i not in "[]")
-        return lkResult, PageLen
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, None
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])
+    if kueri:
+        lkResult = strings("header_with_query").format(
+            web="Layarkaca21", kueri=kueri
+        )
+    else:
+        lkResult = strings("header_no_query").format(web="Layarkaca21", cmd="lk21")
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        lkResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('cat_text')}:</b> <code>{i['kategori']}</code>\n"
+        lkResult += (
+            "\n"
+            if re.search(r"Complete|Ongoing", i["kategori"])
+            else f"<b><a href='{i['dl']}'>{strings('dl_text')}</a></b>\n\n"
+        )
+    return lkResult, PageLen
 # Pahe GetData
@@ -164,24 +157,18 @@ async def getDataPahe(msg, kueri, CurrentPage, strings):
         await msg.edit_msg(strings("no_result"), del_in=5)
         return None, None
     SCRAP_DICT.add(msg.id, [split_arr(res["result"], 6), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
-
-        paheResult = (
-            strings("header_with_query").format(web="Pahe", kueri=kueri)
-            if kueri
-            else strings("header_no_query").format(web="Pahe", cmd="pahe")
-        )
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            paheResult += (
-                f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n\n"
-            )
-        paheResult = "".join(i for i in paheResult if i not in "[]")
-        return paheResult, PageLen
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, None
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])
+    paheResult = (
+        strings("header_with_query").format(web="Pahe", kueri=kueri)
+        if kueri
+        else strings("header_no_query").format(web="Pahe", cmd="pahe")
+    )
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        paheResult += (
+            f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n\n"
+        )
+    return paheResult, PageLen
 # Kusonime GetData
@@ -206,36 +193,31 @@ async def getDataKuso(msg, kueri, CurrentPage, user, strings):
         await msg.edit_msg(strings("no_result"), del_in=5)
         return None, 0, None, None
     SCRAP_DICT.add(msg.id, [split_arr(kusodata, 10), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
-        extractbtn1 = []
-        extractbtn2 = []
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])
+    extractbtn1 = []
+    extractbtn2 = []

-        kusoResult = (
-            strings("header_no_query").format(web="Kusonime", cmd="kusonime")
-            if kueri == ""
-            else strings("header_with_query").format(web="Kusonime", kueri=kueri)
-        )
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            kusoResult += f"<b>{index*6+c}</b>. {i['title']}\n{i['link']}\n\n"
-            if c < 6:
-                extractbtn1.append(
-                    InlineButton(
-                        index * 6 + c, f"kusoextract#{CurrentPage}#{c}#{user}#{msg.id}"
-                    )
-                )
-            else:
-                extractbtn2.append(
-                    InlineButton(
-                        index * 6 + c, f"kusoextract#{CurrentPage}#{c}#{user}#{msg.id}"
-                    )
-                )
-        kusoResult = "".join(i for i in kusoResult if i not in "[]")
-        return kusoResult, PageLen, extractbtn1, extractbtn2
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, 0, None, None
+    kusoResult = (
+        strings("header_no_query").format(web="Kusonime", cmd="kusonime")
+        if kueri == ""
+        else strings("header_with_query").format(web="Kusonime", kueri=kueri)
+    )
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        kusoResult += f"<b>{index*6+c}</b>. {i['title']}\n{i['link']}\n\n"
+        if c < 6:
+            extractbtn1.append(
+                InlineButton(
+                    index * 6 + c, f"kusoextract#{CurrentPage}#{c}#{user}#{msg.id}"
+                )
+            )
+        else:
+            extractbtn2.append(
+                InlineButton(
+                    index * 6 + c, f"kusoextract#{CurrentPage}#{c}#{user}#{msg.id}"
+                )
+            )
+    return kusoResult, PageLen, extractbtn1, extractbtn2
 # Movieku GetData
@@ -262,22 +244,17 @@ async def getDataMovieku(msg, kueri, CurrentPage, strings):
         await msg.edit_msg(strings("no_result"), del_in=5)
         return None, None
     SCRAP_DICT.add(msg.id, [split_arr(moviekudata, 6), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])

-        moviekuResult = (
-            strings("header_no_query").format(web="Movieku", cmd="movieku")
-            if kueri == ""
-            else strings("header_with_query").format(web="Movieku", kueri=kueri)
-        )
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            moviekuResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('quality')}/Status:</b> {i['type']}\n<b>Extract:</b> <code>/movieku_scrap {i['link']}</code>\n\n"
-        moviekuResult = "".join(i for i in moviekuResult if i not in "[]")
-        return moviekuResult, PageLen
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, None
+    moviekuResult = (
+        strings("header_no_query").format(web="Movieku", cmd="movieku")
+        if kueri == ""
+        else strings("header_with_query").format(web="Movieku", kueri=kueri)
+    )
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        moviekuResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('quality')}/Status:</b> {i['type']}\n<b>Extract:</b> <code>/movieku_scrap {i['link']}</code>\n\n"
+    return moviekuResult, PageLen
 # NoDrakor GetData
@@ -304,32 +281,27 @@ async def getDataNodrakor(msg, kueri, CurrentPage, user, strings):
         return None, 0, None
     for i in entry:
         genre = i.find(class_="gmr-movie-on")
-        genre = f"{genre.text}" if genre != "" else "N/A"
+        genre = f"{genre.text}" if genre else "N/A"
         judul = i.find(class_="entry-title").find("a").text
         link = i.find(class_="entry-title").find("a").get("href")
         nodrakordata.append({"judul": judul, "link": link, "genre": genre})
     SCRAP_DICT.add(msg.id, [split_arr(nodrakordata, 6), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
-        extractbtn = []
-        nodrakorResult = (
-            strings("header_no_query").format(web="NoDrakor", cmd="nodrakor")
-            if kueri == ""
-            else strings("header_with_query").format(web="NoDrakor", kueri=kueri)
-        )
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            nodrakorResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> {i['genre']}\n\n"
-            extractbtn.append(
-                InlineButton(
-                    index * 6 + c, f"nodrakorextract#{CurrentPage}#{c}#{user}#{msg.id}"
-                )
-            )
-        nodrakorResult = "".join(i for i in nodrakorResult if i not in "[]")
-        return nodrakorResult, PageLen, extractbtn
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, 0, None
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])
+    extractbtn = []
+    nodrakorResult = (
+        strings("header_no_query").format(web="NoDrakor", cmd="nodrakor")
+        if kueri == ""
+        else strings("header_with_query").format(web="NoDrakor", kueri=kueri)
+    )
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        nodrakorResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> {i['genre']}\n\n"
+        extractbtn.append(
+            InlineButton(
+                index * 6 + c, f"nodrakorextract#{CurrentPage}#{c}#{user}#{msg.id}"
+            )
+        )
+    return nodrakorResult, PageLen, extractbtn
 # Savefilm21 GetData
@@ -361,27 +333,22 @@ async def getDataSavefilm21(msg, kueri, CurrentPage, user, strings):
         link = i.find(class_="entry-title").find("a").get("href")
         sfdata.append({"judul": judul, "link": link, "genre": genre})
     SCRAP_DICT.add(msg.id, [split_arr(sfdata, 6), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
-        extractbtn = []
-        sfResult = (
-            strings("header_no_query").format(web="Savefilm21", cmd="savefilm21")
-            if kueri == ""
-            else strings("header_with_query").format(web="Savefilm21", kueri=kueri)
-        )
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            sfResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> {i['genre']}\n\n"
-            extractbtn.append(
-                InlineButton(
-                    index * 6 + c, f"sf21extract#{CurrentPage}#{c}#{user}#{msg.id}"
-                )
-            )
-        sfResult = "".join(i for i in sfResult if i not in "[]")
-        return sfResult, PageLen, extractbtn
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, 0, None
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])
+    extractbtn = []
+    sfResult = (
+        strings("header_no_query").format(web="Savefilm21", cmd="savefilm21")
+        if kueri == ""
+        else strings("header_with_query").format(web="Savefilm21", kueri=kueri)
+    )
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        sfResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> {i['genre']}\n\n"
+        extractbtn.append(
+            InlineButton(
+                index * 6 + c, f"sf21extract#{CurrentPage}#{c}#{user}#{msg.id}"
+            )
+        )
+    return sfResult, PageLen, extractbtn
 # Lendrive GetData
@@ -420,28 +387,23 @@ async def getDataLendrive(msg, kueri, CurrentPage, user, strings):
         await msg.edit_msg(strings("no_result"), del_in=5)
         return None, 0, None
     savedict[msg.id] = [split_arr(lenddata, 6), kueri]
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(savedict[msg.id][0])
-        extractbtn = []
+    index = int(CurrentPage - 1)
+    PageLen = len(savedict[msg.id][0])
+    extractbtn = []

-        lenddataResult = (
-            strings("header_no_query").format(web="Lendrive", cmd="lendrive")
-            if kueri == ""
-            else strings("header_with_query").format(web="Lendrive", kueri=kueri)
-        )
-        for c, i in enumerate(savedict[msg.id][0][index], start=1):
-            lenddataResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('quality')}:</b> {i['quality']}\n<b>Status:</b> {i['status']}\n\n"
-            extractbtn.append(
-                InlineButton(
-                    index * 6 + c, f"lendriveextract#{CurrentPage}#{c}#{user}#{msg.id}"
-                )
-            )
-        lenddataResult = "".join(i for i in lenddataResult if i not in "[]")
-        return lenddataResult, PageLen, extractbtn
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, 0, None
+    lenddataResult = (
+        strings("header_no_query").format(web="Lendrive", cmd="lendrive")
+        if kueri == ""
+        else strings("header_with_query").format(web="Lendrive", kueri=kueri)
+    )
+    for c, i in enumerate(savedict[msg.id][0][index], start=1):
+        lenddataResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('quality')}:</b> {i['quality']}\n<b>Status:</b> {i['status']}\n\n"
+        extractbtn.append(
+            InlineButton(
+                index * 6 + c, f"lendriveextract#{CurrentPage}#{c}#{user}#{msg.id}"
+            )
+        )
+    return lenddataResult, PageLen, extractbtn
 # MelongMovie GetData
@@ -470,28 +432,23 @@ async def getDataMelong(msg, kueri, CurrentPage, user, strings):
         await msg.edit_msg(strings("no_result"), del_in=5)
         return None, 0, None
     SCRAP_DICT.add(msg.id, [split_arr(melongdata, 6), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
-        extractbtn = []
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])
+    extractbtn = []

-        melongResult = (
-            strings("header_no_query").format(web="Melongmovie", cmd="melongmovie")
-            if kueri == ""
-            else strings("header_with_query").format(web="Melongmovie", kueri=kueri)
-        )
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            melongResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('quality')}:</b> {i['quality']}\n\n"
-            extractbtn.append(
-                InlineButton(
-                    index * 6 + c, f"melongextract#{CurrentPage}#{c}#{user}#{msg.id}"
-                )
-            )
-        melongResult = "".join(i for i in melongResult if i not in "[]")
-        return melongResult, PageLen, extractbtn
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, 0, None
+    melongResult = (
+        strings("header_no_query").format(web="Melongmovie", cmd="melongmovie")
+        if kueri == ""
+        else strings("header_with_query").format(web="Melongmovie", kueri=kueri)
+    )
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        melongResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>{strings('quality')}:</b> {i['quality']}\n\n"
+        extractbtn.append(
+            InlineButton(
+                index * 6 + c, f"melongextract#{CurrentPage}#{c}#{user}#{msg.id}"
+            )
+        )
+    return melongResult, PageLen, extractbtn
 # GoMov GetData
@@ -524,30 +481,25 @@ async def getDataGomov(msg, kueri, CurrentPage, user, strings):
         link = i.find(class_="entry-title").find("a").get("href")
         data.append({"judul": judul, "link": link, "genre": genre})
     SCRAP_DICT.add(msg.id, [split_arr(data, 6), kueri], timeout=1800)
-    try:
-        index = int(CurrentPage - 1)
-        PageLen = len(SCRAP_DICT[msg.id][0])
-        extractbtn = []
+    index = int(CurrentPage - 1)
+    PageLen = len(SCRAP_DICT[msg.id][0])
+    extractbtn = []

-        gomovResult = (
-            strings("header_with_query").format(web="GoMov", kueri=kueri)
-            if kueri
-            else strings("header_no_query").format(web="GoMov", cmd="gomov")
-        )
-        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
-            gomovResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> <code>{i['genre']}</code>\n\n"
-            if not re.search(r"Series", i["genre"]):
-                extractbtn.append(
-                    InlineButton(
-                        index * 6 + c, f"gomovextract#{CurrentPage}#{c}#{user}#{msg.id}"
-                    )
-                )
-        gomovResult += strings("unsupport_dl_btn")
-        gomovResult = "".join(i for i in gomovResult if i not in "[]")
-        return gomovResult, PageLen, extractbtn
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, 0, None
+    gomovResult = (
+        strings("header_with_query").format(web="GoMov", kueri=kueri)
+        if kueri
+        else strings("header_no_query").format(web="GoMov", cmd="gomov")
+    )
+    for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
+        gomovResult += f"<b>{index*6+c}. <a href='{i['link']}'>{i['judul']}</a></b>\n<b>Genre:</b> <code>{i['genre']}</code>\n\n"
+        if not re.search(r"Series", i["genre"]):
+            extractbtn.append(
+                InlineButton(
+                    index * 6 + c, f"gomovextract#{CurrentPage}#{c}#{user}#{msg.id}"
+                )
+            )
+    gomovResult += strings("unsupport_dl_btn")
+    return gomovResult, PageLen, extractbtn
 # getData samehada
@@ -575,18 +527,13 @@ async def getSame(msg, query, current_page, strings):
         await msg.edit_msg(strings("no_result"), del_in=5)
         return None, None
     savedict[msg.id] = [split_arr(sdata, 10), query]
-    try:
-        index = int(current_page - 1)
-        PageLen = len(savedict[msg.id][0])
-        sameresult = "".join(
-            f"<b>{index * 6 + c}. <a href='{i['url']}'>{i['title']}</a>\n<b>Status:</b> {i['sta']}\n</b>Rating:</b> {i['rate']}\n\n"
-            for c, i in enumerate(savedict[msg.id][0][index], start=1)
-        )
-        sameresult = "".join(i for i in sameresult if i not in "[]")
-        return sameresult, PageLen
-    except (IndexError, KeyError):
-        await msg.edit_msg(strings("no_result"), del_in=5)
-        return None, None
+    index = int(current_page - 1)
+    PageLen = len(savedict[msg.id][0])
+    sameresult = "".join(
+        f"<b>{index * 6 + c}. <a href='{i['url']}'>{i['title']}</a>\n<b>Status:</b> {i['sta']}\n</b>Rating:</b> {i['rate']}\n\n"
+        for c, i in enumerate(savedict[msg.id][0][index], start=1)
+    )
+    return sameresult, PageLen
 # SameHada CMD
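
For reference, the post-processing line dropped from each scraper above has the same shape everywhere: it iterates over the already formatted HTML string character by character and strips every literal "[" and "]". A minimal standalone sketch of that behavior in plain Python (independent of the bot code; the title and URL below are made up for illustration):

    # Sketch of the removed step: "".join over a str iterates characters,
    # so any "[" or "]" in titles or links is silently dropped.
    result = "<b>1. <a href='https://example.com'>[BD] Some Title</a></b>\n"
    cleaned = "".join(ch for ch in result if ch not in "[]")
    print(cleaned)  # <b>1. <a href='https://example.com'>BD Some Title</a></b>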