mirror of https://github.com/yasirarism/MissKatyPyro.git
synced 2025-12-29 17:44:50 +00:00

tes fix kuso extract
This commit is contained in:
parent f0f7e1a682
commit 4dddaea6e6
2 changed files with 121 additions and 177 deletions

@@ -8,111 +8,56 @@ from telegraph.aio import Telegraph
from misskaty import BOT_USERNAME
from misskaty.helper.http import fetch
from misskaty.helper.media_helper import post_to_telegraph

LOGGER = logging.getLogger("MissKaty")

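# Browser-like request headers sent with every Kusonime page fetch below.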
headers = {
"Accept": "*/*",
"User-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 Edge/18.19582",
}

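# Scrape a Kusonime release page: collect metadata (title, season, type, status,
# episode count, score, duration, release date, genres) and download links grouped
# by quality, returned as a dict.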
async def kusonimeBypass(url: str, slug=None):
async def kusonimeBypass(url: str):
result = {}
_url = url
if slug:
noslug_url = "https://kusonime.com/{slug}"
_url = noslug_url.format({"slug": slug})
page = await fetch.get(url)
if page.status_code != 200:
raise Exception(f"ERROR: Hostname might be blocked by server!")
try:
page = await fetch.get(_url, headers=headers)
soup = BeautifulSoup(page.text, "lxml")
thumb = soup.find("div", {"class": "post-thumb"}).find("img").get("src")
data = []
# title = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > p:nth-child(3) > strong")[0].text.strip()
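# Metadata parsing below is best-effort: if any selector fails, the except branch
# falls back to the placeholder values instead of aborting the scrape.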
try:
title = soup.find("h1", {"class": "jdlz"}).text  # fix title njing haha
season = (
soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(3)"
)[0]
.text.split(":")
.pop()
.strip()
)
tipe = (
soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(5)"
)[0]
.text.split(":")
.pop()
.strip()
)
status_anime = (
soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(6)"
)[0]
.text.split(":")
.pop()
.strip()
)
ep = (
soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(7)"
)[0]
.text.split(":")
.pop()
.strip()
)
score = (
soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(8)"
)[0]
.text.split(":")
.pop()
.strip()
)
duration = (
soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(9)"
)[0]
.text.split(":")
.pop()
.strip()
)
rilis = (
soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(10)"
)[0]
.text.split(":")
.pop()
.strip()
)
season = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(3)")[0].text.split(":").pop().strip()
tipe = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(5)")[0].text.split(":").pop().strip()
status_anime = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(6)")[0].text.split(":").pop().strip()
ep = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(7)")[0].text.split(":").pop().strip()
score = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(8)")[0].text.split(":").pop().strip()
duration = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(9)")[0].text.split(":").pop().strip()
rilis = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(10)")[0].text.split(":").pop().strip()
except Exception:
e = traceback.format_exc()
LOGGER.error(e)
title, season, tipe, status_anime, ep, score, duration, rilis = (
"None",
"None",
"None",
"None",
0,
0,
0,
"None",
)
title, season, tipe, status_anime, ep, score, duration, rilis = "None", "None", "None", "None", 0, 0, 0, "None"
num = 1
genre = []
for _genre in soup.select(
"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(2)"
):
for _genre in soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(2)"):
gen = _genre.text.split(":").pop().strip().split(", ")
genre = gen
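# Each download block below is flattened into
# {"name": title, "links": [{"quality": ..., "link_download": [{"client": ..., "url": ...}]}]}
# and appended to data.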
for _, smokedl in enumerate(
soup.find("div", {"class": "dlbodz"}).find_all(
"div", {"class": "smokeddlrh"}
),
start=1,
):
for smokedl in soup.find("div", {"class": "dlbodz"}).find_all("div", {"class": "smokeddlrh"}):
if not smokedl:
continue
mendata = {"name": title, "links": []}
for smokeurl in smokedl.find_all("div", {"class": "smokeurl"}):
if not smokeurl:
continue
quality = smokeurl.find("strong").text
links = []
for link in smokeurl.find_all("a"):
url = link.get("href")
client = link.text
links.append({"client": client, "url": url})
mendata["links"].append({"quality": quality, "link_download": links})
for smokeurl in smokedl.find_all("div", {"class": "smokeurlrh"}):
if not smokeurl:
continue
quality = smokeurl.find("strong").text
links = []
for link in smokeurl.find_all("a"):

@@ -121,45 +66,62 @@ async def kusonimeBypass(url: str, slug=None):
links.append({"client": client, "url": url})
mendata["links"].append({"quality": quality, "link_download": links})
data.append(mendata)
result |= {
"error": False,
"title": title,
"thumb": thumb,
"genre": genre,
"genre_string": ", ".join(genre),
"status_anime": status_anime,
"season": season,
"tipe": tipe,
"ep": ep,
"score": score,
"duration": duration,
"rilis": rilis,
"data": data,
}
except Exception:
num += 1
for smokedl in soup.find("div", {"class": "dlbodz"}).find_all("div", {"class": "smokeddl"}):
if not smokedl:
continue
mendata = {"name": title, "links": []}
for smokeurl in smokedl.find_all("div", {"class": "smokeurl"}):
if not smokeurl:
continue
quality = smokeurl.find("strong").text
links = []
for link in smokeurl.find_all("a"):
url = link.get("href")
client = link.text
links.append({"client": client, "url": url})
mendata["links"].append({"quality": quality, "link_download": links})
for smokeurl in smokedl.find_all("div", {"class": "smokeurlrh"}):
if not smokeurl:
continue
quality = smokeurl.find("strong").text
links = []
for link in smokeurl.find_all("a"):
url = link.get("href")
client = link.text
links.append({"client": client, "url": url})
mendata["links"].append({"quality": quality, "link_download": links})
data.append(mendata)
num += 1
result.update({"title": title, "thumb": thumb, "genre": genre, "genre_string": ", ".join(genre), "status_anime": status_anime, "season": season, "tipe": tipe, "ep": ep, "score": score, "duration": duration, "rilis": rilis, "data": data})
except Exception as e:
if len(result) != 0:
result.clear()
err = traceback.format_exc()
LOGGER.error(err)
result |= {"error": True, "error_message": err}
await http.delete(_url)
return result
page.close()
LOGGER.error(f"class: {e.__class__.__name__}, {err}")
raise Exception(f"ERROR: {err}")
finally:
page.close()
return result

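A minimal usage sketch of the bypass helper (illustrative only; the URL and the
surrounding call site are assumptions, not part of this commit):

    result = await kusonimeBypass("https://kusonime.com/some-release/")
    for entry in result.get("data", []):
        print(entry["name"], [q["quality"] for q in entry["links"]])
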
async def byPassPh(url: str, name: str):
async def byPassPh(url: str, name: str) -> Optional[str]:
kusonime = await kusonimeBypass(url)
results = {"error": True, "error_message": kusonime}
if not kusonime["error"]:
template = """
if not isinstance(kusonime, dict):
return kusonime
template = """
<img src={{{thumb}}}>

<p><b>Title</b> : <code>{{title}}</code></p>
<p><b>Genre</b> : <code>{{genre_string}}</code></p>
<br><br><p><b>Season</b> : <code>{{season}}</code></p>
<br><br><p><b>Type</b> : <code>{{tipe}}</code></p>
<br><br><p><b>Status</b> : <code>{{status_anime}}</code></p>
<br><br><p><b>Total Episode</b> : <code>{{ep}}</code></p>
<br><br><p><b>Score</b> : <code>{{score}}</code></p>
<br><br><p><b>Duration</b> : <code>{{duration}}</code></p>
<br><br><p><b>Released on</b> : <code>{{rilis}}</code></p>
<br><p><b>Season</b> : <code>{{season}}</code></p>
<br><p><b>Type</b> : <code>{{tipe}}</code></p>
<br><p><b>Status</b> : <code>{{status_anime}}</code></p>
<br><p><b>Total Episode</b> : <code>{{ep}}</code></p>
<br><p><b>Score</b> : <code>{{score}}</code></p>
<br><p><b>Duration</b> : <code>{{duration}}</code></p>
<br><p><b>Released on</b> : <code>{{rilis}}</code></p>
<br><br>
{{#data}}
<h4>{{name}}</h4>

@@ -172,16 +134,10 @@ async def byPassPh(url: str, name: str):
<br>
{{/data}}
""".strip()
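# Render the Mustache template with the scrape result and publish the HTML to Telegraph.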
html = chevron.render(template, kusonime)
telegraph = Telegraph()
if not telegraph.get_access_token():
await telegraph.create_account(short_name=BOT_USERNAME)
page = await telegraph.create_page(
f"{kusonime.get('title')} By {escape(name)}", html_content=html
)
results |= {"error": False, "url": f'https://telegra.ph/{page["path"]}'}
del results["error_message"]
return results
plink = await post_to_telegraph(
False, f"{kusonime.get('title')} By {escape(name)}", render(template, kusonime)
)
return "https://telegra.ph/{}".format(plink)

class Kusonime:

@@ -185,13 +185,12 @@ async def getDataPahe(msg, kueri, CurrentPage, strings):

async def getDataKuso(msg, kueri, CurrentPage, user, strings):
if not SCRAP_DICT.get(msg.id):
kusodata = []
try:
data = await fetch.get(
f"{web['kusonime']}/?s={kueri}", follow_redirects=True
)
except Exception as err:
data = await fetch.get(
f"{web['kusonime']}/?s={kueri}", follow_redirects=True
)
if data.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, None
return None, 0, None, None
res = BeautifulSoup(data, "lxml").find_all("h2", {"class": "episodeye"})
for i in res:
ress = i.find_all("a")[0]

@@ -238,11 +237,10 @@ async def getDataKuso(msg, kueri, CurrentPage, user, strings):
async def getDataMovieku(msg, kueri, CurrentPage, strings):
if not SCRAP_DICT.get(msg.id):
moviekudata = []
try:
data = await fetch.get(
f"{web['movieku']}/?s={kueri}", follow_redirects=True
)
except Exception as err:
data = await fetch.get(
f"{web['movieku']}/?s={kueri}", follow_redirects=True
)
if data.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, None
r = BeautifulSoup(data, "lxml")

@@ -279,12 +277,10 @@ async def getDataMovieku(msg, kueri, CurrentPage, strings):
async def getDataNodrakor(msg, kueri, CurrentPage, user, strings):
if not SCRAP_DICT.get(msg.id):
nodrakordata = []
try:
data = await fetch.get(
f"{web['nodrakor']}/?s={kueri}",
follow_redirects=True,
)
except Exception as err:
data = await fetch.get(
f"{web['nodrakor']}/?s={kueri}", follow_redirects=True,
)
if data.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, 0, None
text = BeautifulSoup(data, "lxml")

@@ -331,12 +327,10 @@ async def getDataNodrakor(msg, kueri, CurrentPage, user, strings):
async def getDataSavefilm21(msg, kueri, CurrentPage, user, strings):
if not SCRAP_DICT.get(msg.id):
sfdata = []
try:
data = await fetch.get(
f"{web['savefilm21']}/?s={kueri}",
follow_redirects=True,
)
except Exception as err:
data = await fetch.get(
f"{web['savefilm21']}/?s={kueri}", follow_redirects=True,
)
if data.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, 0, None
text = BeautifulSoup(data, "lxml")

@@ -382,15 +376,13 @@ async def getDataSavefilm21(msg, kueri, CurrentPage, user, strings):
# Lendrive GetData
async def getDataLendrive(msg, kueri, CurrentPage, user, strings):
if not SCRAP_DICT.get(msg.id):
try:
if kueri:
data = await fetch.get(
f"{web['lendrive']}/?s={kueri}",
follow_redirects=True,
)
else:
data = await fetch.get(web["lendrive"], follow_redirects=True)
except Exception as err:
if kueri:
data = await fetch.get(
f"{web['lendrive']}/?s={kueri}", follow_redirects=True,
)
else:
data = await fetch.get(web["lendrive"], follow_redirects=True)
if data.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, 0, None
res = BeautifulSoup(data, "lxml")

@@ -442,12 +434,10 @@ async def getDataLendrive(msg, kueri, CurrentPage, user, strings):
# MelongMovie GetData
async def getDataMelong(msg, kueri, CurrentPage, user, strings):
if not SCRAP_DICT.get(msg.id):
try:
data = await fetch.get(
f"{web['melongmovie']}/?s={kueri}",
follow_redirects=True,
)
except Exception as err:
data = await fetch.get(
f"{web['melongmovie']}/?s={kueri}", follow_redirects=True,
)
if data.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, 0, None
bs4 = BeautifulSoup(data, "lxml")

@@ -492,13 +482,12 @@ async def getDataMelong(msg, kueri, CurrentPage, user, strings):
# GoMov GetData
async def getDataGomov(msg, kueri, CurrentPage, user, strings):
if not SCRAP_DICT.get(msg.id):
try:
gomovv = await fetch.get(
f"{web['gomov']}/?s={kueri}", follow_redirects=True
)
except Exception as err:
gomovv = await fetch.get(
f"{web['gomov']}/?s={kueri}", follow_redirects=True
)
if gomovv.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, None
return None, 0, None
text = BeautifulSoup(gomovv, "lxml")
entry = text.find_all(class_="entry-header")
if entry[0].text.strip() == "Nothing Found":

@@ -548,12 +537,11 @@ async def getDataGomov(msg, kueri, CurrentPage, user, strings):
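# Samehadaku is fetched with a synchronous cloudscraper session instead of the async
# fetch helper, presumably to get past Cloudflare; otherwise the flow mirrors the
# helpers above.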
async def getSame(msg, query, current_page, strings):
if not SCRAP_DICT.get(msg.id):
cfse = cloudscraper.create_scraper()
try:
if query:
data = cfse.get(f"{web['samehadaku']}/?s={query}")
else:
data = cfse.get(web["samehadaku"])
except Exception as err:
if query:
data = cfse.get(f"{web['samehadaku']}/?s={query}")
else:
data = cfse.get(web["samehadaku"])
if data.status_code != 200:
await msg.edit_msg(strings("err_getweb").format(err=err))
return None, None
res = BeautifulSoup(data.text, "lxml").find_all(class_="animposx")