Changed cfscrape to cloudscraper

Commit: 511f33260e
Author: yasirarism, 2023-05-16 07:26:32 +00:00 (committed via GitHub)
Parent: 4b6458fff9
5 changed files with 11 additions and 11 deletions

File 1 of 5

@@ -1,9 +1,9 @@
-import cfscrape
+import cloudscraper
 from bs4 import BeautifulSoup


 async def down_page(url):
-    f = cfscrape.create_scraper()
+    f = cloudscraper.create_scraper()
     resp = f.get(url).text
     soup = BeautifulSoup(resp, "lxml")
     maindiv = soup.body.find("div", class_="subtitle").find("div", class_="top left")
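The swap is close to a drop-in replacement. A minimal sketch (not part of the commit) of the shared API, with a placeholder URL:

    # cloudscraper.create_scraper() mirrors cfscrape.create_scraper(): both
    # return a requests.Session-compatible object, so existing .get()/.post()
    # call sites keep working unchanged.
    import cloudscraper

    scraper = cloudscraper.create_scraper()
    resp = scraper.get("https://example.com")  # placeholder URL
    print(resp.status_code, len(resp.text))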

File 2 of 5

@@ -7,7 +7,7 @@ import html
 import pickle
 import json
 import traceback
-import cfscrape
+import cloudscraper
 import aiohttp
 from datetime import datetime
 from shutil import disk_usage
@@ -212,7 +212,7 @@ async def cmd_eval(self: Client, ctx: Message, strings) -> Optional[str]:
         "re": re,
         "os": os,
         "asyncio": asyncio,
-        "cfscrape": cfscrape,
+        "cloudscraper": cloudscraper,
         "json": json,
         "aiohttp": aiohttp,
         "print": _print,

File 3 of 5

@@ -2,7 +2,7 @@ import asyncio
 import logging
 import os
-import cfscrape
+import cloudscraper
 from bs4 import BeautifulSoup
 from pykeyboard import InlineButton, InlineKeyboard
 from pyrogram import Client, filters
@@ -24,7 +24,7 @@ SUB_DL_DICT = {}
 async def getTitleSub(msg, kueri, CurrentPage, user):
     if not SUB_TITLE_DICT.get(msg.id):
         sdata = []
-        scraper = cfscrape.create_scraper()
+        scraper = cloudscraper.create_scraper()
         param = {"query": kueri}
         r = scraper.post("https://subscene.com/subtitles/searchbytitle", data=param).text
         soup = BeautifulSoup(r, "lxml")
@@ -61,7 +61,7 @@ async def getTitleSub(msg, kueri, CurrentPage, user):
 async def getListSub(msg, link, CurrentPage, user):
     if not SUB_DL_DICT.get(msg.id):
         sdata = []
-        scraper = cfscrape.create_scraper()
+        scraper = cloudscraper.create_scraper()
         kuki = {"LanguageFilter": "13,44,50"}  # Only filter language English, Malay, Indonesian
         r = scraper.get(link, cookies=kuki).text
         soup = BeautifulSoup(r, "lxml")
@@ -194,7 +194,7 @@ async def dlsub_callback(self: Client, callback_query: CallbackQuery):
         await callback_query.answer("Invalid callback data, please send CMD again..")
         await asyncio.sleep(3)
         return await callback_query.message.delete_msg()
-    scraper = cfscrape.create_scraper()
+    scraper = cloudscraper.create_scraper()
     res = await down_page(link)
     dl = scraper.get(res.get("download_url"))
     f = open(f"{title}.zip", mode="wb").write(dl.content)
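The download path above keeps its cookies= argument and .content access untouched; since the scraper subclasses requests.Session, both behave as in plain requests. A small sketch under that assumption, with placeholder URL and cookie values:

    # Sketch (not from the commit): cookie passing and binary downloads work
    # the same on a cloudscraper session as on plain requests.
    import cloudscraper

    scraper = cloudscraper.create_scraper()
    resp = scraper.get("https://example.com/sub.zip", cookies={"LanguageFilter": "13"})  # placeholders
    with open("sub.zip", "wb") as fh:  # context manager closes the file handle
        fh.write(resp.content)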

File 4 of 5

@@ -6,7 +6,7 @@
 """
 import re
 import logging
-import cfscrape
+import cloudscraper
 from bs4 import BeautifulSoup
 from pykeyboard import InlineKeyboard, InlineButton
 from pyrogram import filters, Client
@@ -384,7 +384,7 @@ async def getDataGomov(msg, kueri, CurrentPage, user, strings):
 # getData samehada
 async def getSame(msg, query, current_page, strings):
     if not SCRAP_DICT.get(msg.id):
-        cfse = cfscrape.CloudflareScraper()
+        cfse = cloudscraper.create_scraper()
         try:
             if query:
                 data = cfse.get(f"{web['samehadaku']}/?s={query}", headers=headers)
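Note that this hunk changes more than the module name: the old code instantiated cfscrape.CloudflareScraper() directly, while the new code goes through create_scraper(), the factory that cloudscraper's own documentation uses as the entry point.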

File 5 of 5

@@ -26,5 +26,5 @@ deep-translator
 telethon
 pyrate_limiter
 cachetools
-cfscrape
+cloudscraper
 openai
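One deployment note: running pip install -r against an existing environment adds cloudscraper but does not remove the now-unused cfscrape; that takes a manual pip uninstall cfscrape.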