Add size option + anime-ultime to py3
All checks were successful
continuous-integration/drone/push Build is passing
All checks were successful
continuous-integration/drone/push Build is passing
This commit is contained in:
parent
8fedf9222e
commit
bb1d4f6dc7
@ -1,52 +1,53 @@
|
|||||||
#!/usr/bin/env python3
"""Batch-download original-quality episodes from anime-ultime.net.

Given a starting episode URL, repeatedly: scrape the page for the file id,
arm the server-side download countdown, wait it out, fetch the authorized
download link, hand it to wget, then follow the "next episode" link until
none remains.
"""

import argparse
import re
import subprocess
import sys
import time

ROOT_URL = "http://www.anime-ultime.net"

# The download trigger embedded in the page's javascript; group(1) is the
# numeric file id.  Raw string: the parens around the argument list are
# literal and must be escaped (the non-raw, unescaped form would silently
# turn them into regex groups and never match the page).
FILE_ID_RE = re.compile(r"javascript:open_ddlbox\('dl_orig', '([0-9]+)', 'orig'\)")

# The right-floated "next episode" submit button; the three groups rebuild
# the relative URL of the next page.
NEXT_PAGE_RE = re.compile(r'submit.*:right;.*(info-0-1)/([0-9]+)/([^"]+)')


def find_file_id(page_text):
    """Return the downloadable file id found in *page_text*, or None."""
    m = FILE_ID_RE.search(page_text)
    return m.group(1) if m else None


def find_next_url(page_text):
    """Return the absolute URL of the next episode page, or None."""
    m = NEXT_PAGE_RE.search(page_text)
    if m is None:
        return None
    return f"{ROOT_URL}/{m.group(1)}/{m.group(2)}/{m.group(3)}"


def countdown(seconds):
    """Show an in-place countdown of *seconds* seconds on stdout."""
    for remaining in range(seconds, 0, -1):
        # \r rewinds to the line start so the counter updates in place;
        # flush AFTER writing so the message is actually visible.
        sys.stdout.write(f"\rWaiting {remaining} seconds ...")
        sys.stdout.flush()
        time.sleep(1)


def main():
    # Third-party dependency kept local so importing this module (e.g. for
    # the pure helpers above) does not require requests to be installed.
    import requests

    parser = argparse.ArgumentParser()
    parser.add_argument("url", help="url from anime-ultime.net")
    args = parser.parse_args()

    url = args.url
    while url:
        page = requests.get(url)
        file_id = find_file_id(page.text)
        next_url = find_next_url(page.text)

        if file_id is None:
            break

        # First POST arms the server-side wait; the site enforces ~45s
        # before it will hand out the real link.
        requests.post(
            f"{ROOT_URL}/ddl/authorized_download.php",
            data={"idfile": file_id, "type": "orig"},
        )

        countdown(46)

        # Second POST, after the wait, returns JSON with the actual link.
        authorized = requests.post(
            f"{ROOT_URL}/ddl/authorized_download.php",
            data={"idfile": file_id, "type": "orig"},
        )
        link = authorized.json()["link"]

        # Argument list + shell=False: the URL reaches wget verbatim, so
        # special characters need no quoting and cannot be interpreted by
        # a shell (the f-string/shell=True form dropped the quotes the old
        # version had and was both fragile and injectable).
        subprocess.call(
            ["wget", "-c", "-t", "0", "--content-disposition", ROOT_URL + link]
        )

        if next_url is None:
            # Clear the countdown line before exiting.
            sys.stdout.write("\r")
            sys.stdout.flush()
            break
        url = next_url


if __name__ == "__main__":
    main()
@ -1,6 +1,5 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
import argparse
|
import argparse
|
||||||
import json
|
|
||||||
import re
|
import re
|
||||||
from urllib.parse import urlencode, urlparse
|
from urllib.parse import urlencode, urlparse
|
||||||
|
|
||||||
@ -15,6 +14,7 @@ BLACKLIST_WORDS = ["dvd", "iso"]
|
|||||||
parser = argparse.ArgumentParser()
|
parser = argparse.ArgumentParser()
|
||||||
parser.add_argument("-u", "--uploader", action="append")
|
parser.add_argument("-u", "--uploader", action="append")
|
||||||
parser.add_argument("-y", "--year", type=int)
|
parser.add_argument("-y", "--year", type=int)
|
||||||
|
parser.add_argument("-s", "--size", type=int, default=10)
|
||||||
parser.add_argument("query")
|
parser.add_argument("query")
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
@ -77,7 +77,7 @@ def get_files(id):
|
|||||||
req = session.get(
|
req = session.get(
|
||||||
"https://www5.yggtorrent.fi/engine/get_files", params={"torrent": id}
|
"https://www5.yggtorrent.fi/engine/get_files", params={"torrent": id}
|
||||||
)
|
)
|
||||||
files = json.loads(req.text)
|
files = req.json()
|
||||||
html = BeautifulSoup(files["html"], "html.parser")
|
html = BeautifulSoup(files["html"], "html.parser")
|
||||||
trs = html.select("tr")
|
trs = html.select("tr")
|
||||||
return len(trs)
|
return len(trs)
|
||||||
@ -112,7 +112,7 @@ def search_ygg(query, multi):
|
|||||||
size = tds[5].get_text()
|
size = tds[5].get_text()
|
||||||
name = tds[1].get_text().lower().strip()
|
name = tds[1].get_text().lower().strip()
|
||||||
|
|
||||||
if parse_size(size) > parse_size("10Go"):
|
if parse_size(size) > parse_size(f"{args.size}Go"):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if any(word.lower() in name for word in BLACKLIST_WORDS):
|
if any(word.lower() in name for word in BLACKLIST_WORDS):
|
||||||
@ -136,7 +136,7 @@ def search_ygg(query, multi):
|
|||||||
query_string = {"query": args.query, "filters": "type:movie"}
|
query_string = {"query": args.query, "filters": "type:movie"}
|
||||||
|
|
||||||
if args.year:
|
if args.year:
|
||||||
query_string["filters"] += " AND year:" + str(args.year)
|
query_string["filters"] += f" AND year:{args.year}"
|
||||||
|
|
||||||
tvdb = session.post(
|
tvdb = session.post(
|
||||||
"https://tvshowtime-dsn.algolia.net/1/indexes/TVDB/query",
|
"https://tvshowtime-dsn.algolia.net/1/indexes/TVDB/query",
|
||||||
@ -147,7 +147,7 @@ tvdb = session.post(
|
|||||||
json={"params": urlencode(query_string)},
|
json={"params": urlencode(query_string)},
|
||||||
)
|
)
|
||||||
|
|
||||||
tvdata = json.loads(tvdb.text)
|
tvdata = tvdb.json()
|
||||||
|
|
||||||
if not tvdata["nbHits"] > 0:
|
if not tvdata["nbHits"] > 0:
|
||||||
print("Can't find query on TheTVDB")
|
print("Can't find query on TheTVDB")
|
||||||
@ -168,3 +168,5 @@ search_ygg(eng, True)
|
|||||||
search_ygg(args.query, False)
|
search_ygg(args.query, False)
|
||||||
search_ygg(fra, False)
|
search_ygg(fra, False)
|
||||||
search_ygg(eng, False)
|
search_ygg(eng, False)
|
||||||
|
|
||||||
|
print("No results :(")
|
||||||
|
Loading…
Reference in New Issue
Block a user