This commit is contained in:
yasir 2023-03-16 12:23:39 +07:00
parent a74fb73d25
commit ab8bd8451b
3 changed files with 53 additions and 9 deletions

View file

@@ -12,3 +12,4 @@ from .time_gap import *
from .ssgen_helper import *
from .tools import *
from .ytdl_helper import *
from .subscene_helper import *

View file

@@ -0,0 +1,43 @@
import cfscrape
from bs4 import BeautifulSoup


async def down_page(url):
    """Scrape a subscene.com subtitle page and return its metadata as a dict."""
    scraper = cfscrape.create_scraper()
    resp = scraper.get(url).text
    soup = BeautifulSoup(resp, 'lxml')
    maindiv = soup.body.find('div', class_='subtitle').find('div', class_='top left')
    header = maindiv.find('div', class_='header')
    title = header.h1.span.text.strip()
    try:
        imdb = header.h1.a['href']
    except TypeError:
        imdb = ""
    try:
        poster = maindiv.find('div', class_='poster').a['href']
    except (AttributeError, TypeError):
        poster = ""
    try:
        author = header.ul.find('li', class_='author').a
        author_name = author.text.strip()
        author_link = f"https://subscene.com{author['href']}"
    except (AttributeError, TypeError):
        author_name = "Anonymous"
        author_link = ""
    download_url = f"https://subscene.com{header.ul.find('li', class_='clearfix').find('div', class_='download').a['href']}"
    try:
        comments = header.ul.find('li', class_='comment-wrapper').find('div', class_='comment').text
    except (AttributeError, TypeError):
        comments = ""
    try:
        # Keep only the first two release names so the summary stays short.
        release = header.ul.find('li', class_='release').find_all('div')
        releases = ""
        for div in release[:2]:
            releases += f"\n{div.text.strip()}"
    except (AttributeError, TypeError):
        releases = ""
    response = {"title": title, "imdb": imdb, "poster": poster, "author_name": author_name,
                "author_url": author_link, "download_url": download_url, "comments": comments,
                "releases": releases}
    return response
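
A quick way to exercise the new helper outside the bot (a sketch, not part of this commit; the subtitle-page URL is a placeholder):

import asyncio

from misskaty.helper.subscene_helper import down_page

async def main():
    # Any subscene.com subtitle page URL should work here; this one is a placeholder.
    info = await down_page("https://subscene.com/subtitles/example/english/1234567")
    print(info["title"], info["download_url"])

asyncio.run(main())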

View file

@@ -1,7 +1,8 @@
import logging, os
import cfscrape
import cfscrape, aiofiles
from bs4 import BeautifulSoup
from misskaty.helper.subscene_helper import down_page
from pykeyboard import InlineButton, InlineKeyboard
from pyrogram import filters
@@ -187,12 +188,11 @@ async def dlsub_callback(client, callback_query):
        await asyncio.sleep(3)
        return await callback_query.message.delete()
    scraper = cfscrape.create_scraper()
    req = scraper.get(link).text
    soup = BeautifulSoup(req, "lxml")
    judul = soup.find("div", {"class": "bread"}).find("a").get("href").split("/")[4]
    downloadlink = soup.find("div", {"class": "download"}).find('a')
    download = 'https://subscene.com' + downloadlink['href']
    dl = scraper.get(download)
    open(f"{judul}.zip", "wb").write(dl.content)
    await callback_query.message.reply_document(f"{judul}.zip")
    res = await down_page(link)
    judul = res.get("title")
    dl = scraper.get(res.get("download_url"))
    # Write the zip asynchronously so the event loop is not blocked on disk I/O.
    f = await aiofiles.open(f"{judul}.zip", mode='wb')
    await f.write(dl.content)
    await f.close()
    await callback_query.message.reply_document(f"{judul}.zip", caption=f"Title: {judul}\nIMDb: {res['imdb']}\nAuthor: {res['author_name']}")
    os.remove(f"{judul}.zip")
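
The rewritten callback saves the archive with aiofiles before replying; a minimal standalone sketch of that fetch-and-save step, assuming the same cfscrape/aiofiles stack (the URL and filename below are placeholders):

import asyncio

import aiofiles
import cfscrape

async def save_subtitle(download_url, filename):
    # cfscrape sessions are requests-based, so the HTTP download itself is blocking.
    scraper = cfscrape.create_scraper()
    dl = scraper.get(download_url)
    # aiofiles keeps the disk write from blocking the event loop.
    async with aiofiles.open(filename, mode="wb") as f:
        await f.write(dl.content)
    return filename

# Example (placeholder URL):
# asyncio.run(save_subtitle("https://subscene.com/subtitles/...", "subtitle.zip"))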