yasir 2023-01-15 16:10:39 +07:00
parent 7b6d035d05
commit c29e21101e
3 changed files with 152 additions and 26 deletions

View file

@@ -1,22 +1,3 @@
"""
MIT License
Copyright (c) 2021 TheHamkerCat
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from asyncio import gather
import httpx

misskaty/helper/kuso_utils.py
View file

@@ -0,0 +1,102 @@
import re
import chevron
import telegraph
import logging
from aiohttp import ClientSession
from misskaty import BOT_USERNAME
from ..plugins.web_scraper import headers
from bs4 import BeautifulSoup as bs4
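# Helpers for scraping Kusonime post pages and mirroring the results to Telegraph.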
LOGGER = logging.getLogger(__name__)
telegraph = telegraph.Telegraph()
if telegraph.get_access_token() is None:
    token_ph = telegraph.create_account(short_name=BOT_USERNAME)
    LOGGER.info("kuso_utils: Creating Telegraph account..")
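# Reuse the User-Agent string exported by the web_scraper plugin as the default headers.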
headers = {"Accept": "*/*", "User-Agent": headers}
async def kusonimeBypass(url: str, slug=None):
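    """Scrape the title, thumbnail, genre, status and download links from a Kusonime post."""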
    hasil = {}
    _url = url
    request = ClientSession(headers=headers)
    if slug:
        noslug_url = "https://kusonime.com/{slug}"
        _url = noslug_url.format(slug=slug)
    try:
        test = await request.get(_url)
        page = await test.text()
        soup = bs4(page, "html.parser")
        thumb = soup.find("div", {"class": "post-thumb"}).find("img").get("src")
        data = []
        title = soup.find("h1", {"class": "jdlz"}).text  # the h1 element carries the clean title
        num = 1
        genre = []
        for _genre in soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(2)"):
            genre = _genre.text.split(":").pop().strip().split(", ")
        status_anime = soup.select("#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.info > p:nth-child(6)")[0].text.split(":").pop().strip()
        for smokedl in soup.find("div", {"class": "dlbod"}).find_all("div", {"class": "smokeddl"}):
            titl = soup.select(f"#venkonten > div.vezone > div.venser > div.venutama > div.lexot > div.dlbod > div:nth-child({num}) > div.smokettl")[0].text
            titl = re.sub("Download", "", titl).strip()
            mendata = {"name": titl, "links": []}
            for smokeurl in smokedl.find_all("div", {"class": "smokeurl"}):
                quality = smokeurl.find("strong").text
                links = []
                for link in smokeurl.find_all("a"):
                    links.append({"client": link.text, "url": link.get("href")})
                mendata["links"].append(dict(quality=quality, link_download=links))
            data.append(mendata)
            num += 1
        hasil.update({
            "error": False,
            "title": title,
            "thumb": thumb,
            "genre": genre,
            "genre_string": ", ".join(genre),
            "status_anime": status_anime,
            "data": data,
        })
    except Exception as err:
        hasil.update({"error": True, "error_message": f"kuso bypass error: {err}"})
    finally:
        await request.close()
    return hasil

async def byPassPh(url: str, msg_id: int):
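    """Render a kusonimeBypass() result with chevron and publish it as a Telegraph page."""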
    kusonime = await kusonimeBypass(url)
    results = {"error": True, "error_message": "Failed to create or publish Telegraph page"}
    template = """
<img src="{{{thumb}}}">
<p><b>Title</b> : <code>{{title}}</code></p>
<p><b>Genre</b> : <code>{{genre_string}}</code></p>
<p><b>Status</b> : <code>{{status_anime}}</code></p>
<br>
{{#data}}
<h4>{{name}}</h4>
{{#links}}
<p><b>Resolution: {{quality}}</b></p>
{{#link_download}}
<p> <a href="{{url}}">{{client}}</a></p>
{{/link_download}}
{{/links}}
<br>
{{/data}}
""".strip()
    if not kusonime["error"]:
        html = chevron.render(template, kusonime)
        page = telegraph.create_page(f"{kusonime.get('title')}-{msg_id}", html_content=html)
        results.update({"error": False, "url": "https://telegra.ph/{}".format(page["path"])})
        del results["error_message"]
    return results

class Kusonime:
    async def byPass(self, url):
        return await kusonimeBypass(url)

    async def telegraph(self, url, msg_id):
        return await byPassPh(url, msg_id)
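
# Minimal usage sketch (hypothetical URL, run inside an async context):
#   kuso = Kusonime()
#   result = await kuso.byPass("https://kusonime.com/example-anime/")
#   if not result["error"]:
#       print(result["title"], result["genre_string"])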

misskaty/plugins/web_scraper.py
View file

@@ -12,6 +12,7 @@ from pykeyboard import InlineKeyboard, InlineButton
from pyrogram import filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup
from misskaty.helper.http import http
from misskaty.helper.kuso_utils import Kusonime
from misskaty import app
from misskaty.vars import COMMAND_HANDLER
from misskaty.core.message_utils import *
@@ -162,7 +163,7 @@ async def getDataNodrakor(msg, kueri, CurrentPage):
        return None, None
# Kusonime GetData
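# (`user` is the requester's Telegram id; it is embedded in each result button's callback data)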
async def getDataKuso(msg, kueri, CurrentPage):
async def getDataKuso(msg, kueri, CurrentPage, user):
    if not SCRAP_DICT.get(msg.id):
        kusodata = []
        data = await http.get(f'https://kusonime.com/?s={kueri}', headers=headers)
@@ -174,21 +175,25 @@
            kusodata.append({"title": title, "link": link})
        if not kusodata:
            await editPesan(msg, "Sorry, could not find any results!")
            return None, None
            return None, 0, None
        SCRAP_DICT[msg.id] = [split_arr(kusodata, 6), kueri]
    try:
        index = int(CurrentPage - 1)
        PageLen = len(SCRAP_DICT[msg.id][0])
        extractbtn = []
        kusoResult = f"<b>#Kusonime Results For:</b> <code>{kueri}</code>\n\n"
        for c, i in enumerate(SCRAP_DICT[msg.id][0][index], start=1):
            kusoResult += f"<b>{c}</b>. {i['title']}\n{i['link']}\n\n"
            extractbtn.append(
                InlineButton(c, f"kusoextract#{CurrentPage}#{c}#{user}#{msg.id}")
            )
        IGNORE_CHAR = "[]"
        kusoResult = ''.join(i for i in kusoResult if i not in IGNORE_CHAR)
        return kusoResult, PageLen
        return kusoResult, PageLen, extractbtn
    except (IndexError, KeyError):
        await editPesan(msg, "Sorry, could not find any matching results!")
        return None, None
        return None, 0, None
# Movieku GetData
async def getDataMovieku(msg, kueri, CurrentPage):
@@ -542,10 +547,12 @@ async def kusonime_s(client, message):
        kueri = ""
    pesan = await kirimPesan(message, "⏳ Please wait, scraping data from Kusonime..", quote=True)
    CurrentPage = 1
    kusores, PageLen = await getDataKuso(pesan, kueri, CurrentPage)
    kusores, PageLen, btn = await getDataKuso(pesan, kueri, CurrentPage, message.from_user.id)
    if not kusores:
        return
    keyboard = InlineKeyboard()
    keyboard.paginate(PageLen, CurrentPage, 'page_kuso#{number}' + f'#{pesan.id}#{message.from_user.id}')
    keyboard.row(InlineButton("👇 Extract Data ", "Hmmm"))
    keyboard.row(*btn)
    keyboard.row(
        InlineButton("❌ Close", f"close#{message.from_user.id}")
    )
@@ -624,12 +631,14 @@ async def kusopage_callback(client, callback_query):
        return await callback_query.answer("Invalid callback data, please send CMD again..")
    try:
        kusores, PageLen = await getDataKuso(callback_query.message, kueri, CurrentPage)
        kusores, PageLen, btn = await getDataKuso(callback_query.message, kueri, CurrentPage, callback_query.from_user.id)
    except TypeError:
        return
    keyboard = InlineKeyboard()
    keyboard.paginate(PageLen, CurrentPage, 'page_kuso#{number}' + f'#{message_id}#{callback_query.from_user.id}')
    keyboard.row(InlineButton("👇 Extract Data ", "Hmmm"))
    keyboard.row(*btn)
    keyboard.row(
        InlineButton("❌ Close", f"close#{callback_query.from_user.id}")
    )
@@ -852,6 +861,40 @@ async def zonafilmpage_callback(client, callback_query):
    await editPesan(callback_query.message, zonafilmres, reply_markup=keyboard)
### Scrape DDL Link From Web ###
# Kusonime DDL
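# Callback data layout: kusoextract#<page>#<result_index>#<user_id>#<message_id>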
data_kuso = {}  # module-level cache: link -> {"ph_url": ...}, so repeat requests reuse the Telegraph page

@app.on_callback_query(filters.create(lambda _, __, query: 'kusoextract#' in query.data))
async def kusonime_scrap(_, callback_query):
    if callback_query.from_user.id != int(callback_query.data.split('#')[3]):
        return await callback_query.answer("Not yours..", True)
    idlink = int(callback_query.data.split("#")[2])
    message_id = int(callback_query.data.split('#')[4])
    CurrentPage = int(callback_query.data.split('#')[1])
    try:
        link = SCRAP_DICT[message_id][0][CurrentPage - 1][idlink - 1].get("link")
    except (IndexError, KeyError):
        return await callback_query.answer("Invalid callback data, please send CMD again..")
    kuso = Kusonime()
    keyboard = InlineKeyboard()
    keyboard.row(
        InlineButton("↩️ Back", f"page_kuso#{CurrentPage}#{message_id}#{callback_query.from_user.id}"),
        InlineButton("❌ Close", f"close#{callback_query.from_user.id}")
    )
    try:
        init_url = data_kuso.get(link)
        if init_url:
            ph = init_url.get("ph_url")
            return await editPesan(callback_query.message, f"<b>Scrape result from {link}</b>:\n\n{ph}", reply_markup=keyboard)
        tgh = await kuso.telegraph(link, message_id)
        if tgh["error"]:
            await editPesan(callback_query.message, f"ERROR: {tgh['error_message']}", reply_markup=keyboard)
            return
    except Exception as err:
        await editPesan(callback_query.message, f"ERROR: {err}", reply_markup=keyboard)
        return
    data_kuso[link] = {"ph_url": tgh["url"]}
    await editPesan(callback_query.message, f"<b>Scrape result from {link}</b>:\n\n{tgh['url']}", reply_markup=keyboard)
# Savefilm21 DDL
@app.on_callback_query(filters.create(lambda _, __, query: 'sf21extract#' in query.data))
@@ -879,7 +922,7 @@ async def savefilm21_scrap(_, callback_query):
    except Exception as err:
        await editPesan(callback_query.message, f"ERROR: {err}", reply_markup=keyboard)
        return
    await editPesan(callback_query.message, f"<b>Hasil Scrap dari {link}</b>:\n\n{res}", reply_markup=keyboard)
    await editPesan(callback_query.message, f"<b>Scrape result from {link}</b>:\n\n{res}", reply_markup=keyboard)
# Scrape DDL Link Nodrakor
@app.on_message(filters.command(["nodrakor_scrap"], COMMAND_HANDLER))