Compare commits
No commits in common. "master" and "webserver" have entirely different histories.
@@ -1,6 +1,5 @@
 from bs4 import BeautifulSoup as bs
 import csv, jsbeautifier, cloudscraper, os, re, smtplib
-#from selenium import webdriver
 from email.mime.text import MIMEText
 from json import loads
 from datetime import datetime
@@ -19,12 +18,8 @@ def get_config():
     return configs

 def get_soup(url): # given a url, returns the soup of the page
-    #driver.get(url)
-    #data = driver.page_source
-    r = cloudscraper.create_scraper(session)
-    headers = {"User-Agent":"Mozilla/5.0 (X11; Linux x86_64; rv:135.0) Gecko/20100101 Firefox/135.0"}
-    cookies = {"ASNew-q8":"2fa7ff5d81fa1a0db26b3f696a98dec9"}
-    page=r.get(url, headers=headers, cookies=cookies)
+    r = cloudscraper.create_scraper()
+    page=r.get(url)
     data=page.text
     return bs(data, features="html.parser")
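For reference, the replacement get_soup on the webserver branch drops the custom User-Agent, cookie, and pre-mounted session entirely. A minimal, self-contained sketch assembled from the added lines above (imports are the ones already at the top of the file; nothing here beyond what the diff shows):

from bs4 import BeautifulSoup as bs
import cloudscraper

def get_soup(url):  # given a url, returns the soup of the page
    r = cloudscraper.create_scraper()        # fresh scraper session per call, handles Cloudflare challenges
    page = r.get(url)                        # plain GET, no extra headers or cookies
    data = page.text
    return bs(data, features="html.parser")  # parse with the built-in html.parser

Because the scraper is now created inside the function, the session/TLS-adapter plumbing removed in the last hunk is presumably no longer needed.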
@@ -115,8 +110,9 @@ def scarica(lista,configs): #la lista sarà nella forma filepath,url e prova in
 def modulo_scarica():
     file = get_animu()
     configs = get_config()
+    # lines[0] is the episode url, lines[1] is the download folder, lines[2] is the anime name
     lista_email=[]
-    for lines in file[1:]: # lines[0] is the url, lines[1] is the download folder, lines[2] is episode naming, lines[3] is enabled?, lines[4] is jellyfin naming?
+    for lines in file[1:]:
         ora=datetime.now()
         orario=ora.strftime("%d/%m/%Y %H:%M:%S")
         if int(lines[3]):
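The added comment documents what each row of the config file holds. To make the indices concrete, here is a hypothetical row (all values invented for illustration; the meaning of indices [3] and [4] is taken from the comment the old branch kept on the for line):

# lines[0] = episode URL, lines[1] = download folder, lines[2] = anime name,
# lines[3] = enabled flag, lines[4] = Jellyfin-style naming flag
lines = ["https://example.com/anime/ep-1", "/downloads/MyAnime", "MyAnime", "1", "0"]
if int(lines[3]):   # rows are skipped unless the enabled flag is non-zero
    print(f"would download {lines[2]} into {lines[1]}")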
@@ -150,7 +146,7 @@ def modulo_scarica():
             results.append((filepath,episodi))
             for ep in results:
                 scarica(ep,configs)
-                lista_email.append((ep[0],0))
+                lista_email.append((os.path.basename(ep[0]),0))
         except Exception as e:
             print(e)
             lista_email.append((lines[1],1))
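The single change in this hunk wraps ep[0] in os.path.basename, so the e-mail list records only the file name instead of the full download path. A small sketch with an invented path:

import os

filepath = "/downloads/MyAnime/MyAnime_Ep_01.mp4"     # illustrative path, not taken from the repo
lista_email = []
lista_email.append((os.path.basename(filepath), 0))   # -> ("MyAnime_Ep_01.mp4", 0), 0 flag as in the success path of the diff
print(lista_email)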
@@ -187,32 +183,5 @@ def modulo_scarica():
     new_email(subject,body,configs)

 if __name__ == "__main__":
-    ######################## to correct the tls error
-    #import cloudscraper
-    from requests.adapters import HTTPAdapter
-    from requests.packages.urllib3.poolmanager import PoolManager
-    import ssl
-
-    class SSLAdapter(HTTPAdapter):
-        def init_poolmanager(self, connections, maxsize, block=False):
-            self.poolmanager = PoolManager(
-                num_pools=connections,
-                maxsize=maxsize,
-                block=block,
-                ssl_version=ssl.PROTOCOL_TLS,
-                ciphers='ECDHE-RSA-AES128-GCM-SHA256')
-
-
-    #firefox_options = webdriver.FirefoxOptions()
-    #firefox_options.add_argument("--profile=/opt/saturn_cli/selenium_profile")
-    #driver = webdriver.Chrome()
-
-
-    session = cloudscraper.Session()
-    session.mount('https://', SSLAdapter())
-    #response = session.get('https://example.com')
-    #print(response.text)
-    ########################
     modulo_scarica()

-