Add size option + anime-ultime to py3
All checks were successful
continuous-integration/drone/push Build is passing
parent 8fedf9222e
commit bb1d4f6dc7
@@ -1,52 +1,53 @@
 #!/usr/bin/env python3
 import argparse
 import re
-import requests
 import subprocess
 import sys
 import time
 
+import requests
+
 parser = argparse.ArgumentParser()
-parser.add_argument('url', help="url from anime-ultime.net")
+parser.add_argument("url", help="url from anime-ultime.net")
 args = parser.parse_args()
 
 nextHop = True
 url = args.url
-root_url = 'http://www.anime-ultime.net'
+root_url = "http://www.anime-ultime.net"
 
 while nextHop:
     r1 = requests.get(url)
-    m1 = re.search(
-        "javascript:open_ddlbox\('dl_orig', '([0-9]+)', 'orig'\)", r1.text
-    )
-    m2 = re.search(
-        'submit.*:right;.*(info-0-1)/([0-9]+)/([^"]+)', r1.text
-    )
+    m1 = re.search("javascript:open_ddlbox\('dl_orig', '([0-9]+)', 'orig'\)", r1.text)
+    m2 = re.search('submit.*:right;.*(info-0-1)/([0-9]+)/([^"]+)', r1.text)
 
     if m1 is None:
         break
 
     requests.post(
-        root_url + '/ddl/authorized_download.php',
-        data={'idfile': m1.group(1), 'type': 'orig'}
+        f"{root_url}/ddl/authorized_download.php",
+        data={"idfile": m1.group(1), "type": "orig"},
     )
 
     timeout = 46
     while timeout > 0:
-        sys.stdout.write('\r')
+        sys.stdout.write("\r")
         sys.stdout.flush()
-        sys.stdout.write('Waiting ' + str(timeout) + ' seconds ...')
+        sys.stdout.write(f"Waiting {timeout} seconds ...")
         timeout = timeout - 1
         time.sleep(1)
 
     r2 = requests.post(
-        root_url + '/ddl/authorized_download.php',
-        data={'idfile': m1.group(1), 'type': 'orig'}
+        f"{root_url}/ddl/authorized_download.php",
+        data={"idfile": m1.group(1), "type": "orig"},
     )
     j = r2.json()
-    subprocess.call('wget -c -t 0 --content-disposition "' +
-                    root_url + j['link'] + '"', shell=True)
+    subprocess.call(
+        f"wget -c -t 0 --content-disposition {root_url}{j['link']}", shell=True
+    )
 
     if m2 is None:
         nextHop = False
-        sys.stdout.write('\r')
+        sys.stdout.write("\r")
        sys.stdout.flush()
     else:
-        url = root_url + '/' + \
-            m2.group(1) + '/' + m2.group(2) + '/' + m2.group(3)
+        url = f"{root_url}/{m2.group(1)}/{m2.group(2)}/{m2.group(3)}"
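A side note on the countdown in the hunk above: the port keeps the single-line progress display but rewrites the message as an f-string. A minimal, self-contained sketch of that carriage-return pattern (the countdown helper is illustrative, not part of the commit):

import sys
import time

def countdown(seconds):
    # Rewrite the same terminal line each second instead of printing new lines.
    for remaining in range(seconds, 0, -1):
        sys.stdout.write("\r")
        sys.stdout.write(f"Waiting {remaining} seconds ...")
        sys.stdout.flush()
        time.sleep(1)
    sys.stdout.write("\r")
    sys.stdout.flush()

countdown(3)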
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
 import argparse
-import json
 import re
 from urllib.parse import urlencode, urlparse
 
@@ -15,6 +14,7 @@ BLACKLIST_WORDS = ["dvd", "iso"]
 parser = argparse.ArgumentParser()
 parser.add_argument("-u", "--uploader", action="append")
 parser.add_argument("-y", "--year", type=int)
+parser.add_argument("-s", "--size", type=int, default=10)
 parser.add_argument("query")
 args = parser.parse_args()
 
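The new -s/--size flag is the "size option" from the commit title; its default of 10 keeps the behaviour of the old hard-coded "10Go" cap. A hypothetical standalone check of the argparse behaviour:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-s", "--size", type=int, default=10)

print(parser.parse_args(["-s", "20"]).size)  # 20
print(parser.parse_args([]).size)            # 10, the old hard-coded cap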
@@ -77,7 +77,7 @@ def get_files(id):
     req = session.get(
         "https://www5.yggtorrent.fi/engine/get_files", params={"torrent": id}
     )
-    files = json.loads(req.text)
+    files = req.json()
     html = BeautifulSoup(files["html"], "html.parser")
     trs = html.select("tr")
     return len(trs)
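Dropping import json works because requests already exposes the same parsing on the response object: Response.json() decodes the body and returns the parsed structure, so the two forms are interchangeable for a valid JSON body. Illustrative check against a generic JSON endpoint (not the real yggtorrent one):

import json
import requests

resp = requests.get("https://httpbin.org/json")  # any JSON endpoint works here
assert resp.json() == json.loads(resp.text)      # old and new forms agree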
@@ -112,7 +112,7 @@ def search_ygg(query, multi):
         size = tds[5].get_text()
         name = tds[1].get_text().lower().strip()
 
-        if parse_size(size) > parse_size("10Go"):
+        if parse_size(size) > parse_size(f"{args.size}Go"):
             continue
 
         if any(word.lower() in name for word in BLACKLIST_WORDS):
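parse_size itself is outside this diff; for the f"{args.size}Go" comparison to work it only needs to map strings such as "700Mo" or "10Go" (French unit suffixes) to a comparable number. A hypothetical stand-in, for illustration only:

import re

UNITS = {"o": 1, "Ko": 1024, "Mo": 1024**2, "Go": 1024**3, "To": 1024**4}

def parse_size(size):
    # "700Mo" -> 700 * 1024**2, "10Go" -> 10 * 1024**3, and so on.
    number, unit = re.match(r"([0-9.]+)\s*([KMGT]?o)", size).groups()
    return float(number) * UNITS[unit]

assert parse_size("9.5Go") < parse_size("10Go") < parse_size("11Go")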
@@ -136,7 +136,7 @@ def search_ygg(query, multi):
 query_string = {"query": args.query, "filters": "type:movie"}
 
 if args.year:
-    query_string["filters"] += " AND year:" + str(args.year)
+    query_string["filters"] += f" AND year:{args.year}"
 
 tvdb = session.post(
     "https://tvshowtime-dsn.algolia.net/1/indexes/TVDB/query",
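The f-string rewrite produces the same filter string as the old concatenation; with --year 2019 (illustrative value) it expands like this:

query_string = {"query": "some movie", "filters": "type:movie"}
year = 2019  # stands in for args.year
query_string["filters"] += f" AND year:{year}"
print(query_string["filters"])  # type:movie AND year:2019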
@@ -147,7 +147,7 @@ tvdb = session.post(
     json={"params": urlencode(query_string)},
 )
 
-tvdata = json.loads(tvdb.text)
+tvdata = tvdb.json()
 
 if not tvdata["nbHits"] > 0:
     print("Can't find query on TheTVDB")
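Same json.loads -> .json() substitution as in get_files; the Algolia request itself is unchanged. For reference, the urlencode wrapper sent as "params" looks like this (illustrative values):

from urllib.parse import urlencode

print(urlencode({"query": "dune", "filters": "type:movie AND year:2021"}))
# query=dune&filters=type%3Amovie+AND+year%3A2021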
@@ -168,3 +168,5 @@ search_ygg(eng, True)
 search_ygg(args.query, False)
 search_ygg(fra, False)
 search_ygg(eng, False)
+
+print("No results :(")