#!/usr/bin/env python3
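"""Search YggTorrent for a single-file MKV release of a movie.

The query is also resolved against TheTVDB (via the TVShowTime Algolia index)
so the French and English titles can be tried, and results are filtered by
size, download count, uploader and a keyword blacklist.
"""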
import argparse
import os
import re
import urllib.parse

import bs4
import requests
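
# Requests go directly to the tracker's IP (taken from the YGG_IP environment
# variable) while the real domain is sent in the Host header; presumably this
# works around DNS-level blocking of the YggTorrent domain.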
YGG_DOMAIN = "www6.yggtorrent.lol"
YGG_IP = os.getenv("YGG_IP")
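
# Command-line options: -u keeps only releases whose name contains one of the
# given uploader tags, -b blacklists keywords (on top of the default "dvd" and
# "iso"), -y filters by release year, -s caps the size in GB (default 10) and
# -d sets the minimum download count (default 20). The positional argument is
# the movie to search for.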
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--uploader", action="append")
parser.add_argument("-b", "--blacklist", action="append", default=["dvd", "iso"])
parser.add_argument("-y", "--year", type=int)
parser.add_argument("-s", "--size", type=int, default=10)
parser.add_argument("-d", "--downloads", type=int, default=20)
parser.add_argument("query")
args = parser.parse_args()
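

# Sizes on the site use French octet units ("o" for bytes, plus the decimal
# multiples Ko/Mo/Go/To); parse_size turns such a string into a byte count.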
def parse_size(size):
    units = {"o": 1, "Ko": 10**3, "Mo": 10**6, "Go": 10**9, "To": 10**12}
    match = re.search("([0-9.]+)([^0-9]+)", size)
    number = match.group(1).strip()
    unit = match.group(2).strip()
    return int(float(number) * units[unit])
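

# A torrent is accepted only if its file list holds exactly one file and that
# file is an .mkv. The list comes from the engine/get_files endpoint, which
# returns an HTML fragment wrapped in JSON.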
def check_files(torrent_id):
    req = requests.get(
        f"http://{YGG_IP}/engine/get_files",
        params={"torrent": torrent_id},
        headers={"Host": YGG_DOMAIN},
    )
    res = req.json()
    html = bs4.BeautifulSoup(res["html"], "html.parser")
    trs = html.select("tr")
    return len(trs) == 1 and "mkv" in trs[0].get_text().lower()
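

# Search YggTorrent (category 2145, sub-category 2183, presumably the film
# section) and print the first result that passes every filter, then exit.
# `multi` presumably enables the site's multi-language filter (option_langue
# value 4); `full` appends the release year to the query when one was given.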
def search_ygg(query, multi, full):
    ygg_params = {
        "name": query,
        "category": "2145",
        "sub_category": "2183",
        "do": "search",
        "order": "asc",
        "sort": "publish_date",
    }

    if full and args.year:
        ygg_params["name"] += f" {args.year}"

    if multi:
        ygg_params["option_langue:multiple[]"] = "4"

    req = requests.get(
        f"http://{YGG_IP}/engine/search",
        params=ygg_params,
        headers={"Host": YGG_DOMAIN},
    )
    html = bs4.BeautifulSoup(
        req.text,
        "html.parser",
    )
    trs = html.select("table.table tr")

    if len(trs) > 1:
        for i, tr in enumerate(trs):
            # Row 0 is the table header.
            if not i:
                continue

            tds = tr.find_all("td")
            size = tds[5].get_text()
            downloads = tds[6].get_text()
            name = tds[1].get_text().lower().strip()

            if parse_size(size) > parse_size(f"{args.size}Go"):
                continue

            if int(downloads) < args.downloads:
                continue

            if any(word.lower() in name for word in args.blacklist):
                continue

            if args.year and str(args.year) not in name:
                continue

            if args.uploader and not any(
                uploader.lower() in name for uploader in args.uploader
            ):
                continue

            # The torrent id is the leading digits of the last URL segment.
            link = tds[1].a["href"]
            torrent_id = link.split("/")[-1].split("-")[0]

            if not check_files(torrent_id):
                continue

            print(f"{name} {link}")
            exit(0)
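

# Resolve the query against TheTVDB through the TVShowTime Algolia index so
# the search can be retried with the French and English titles as well.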
query_string = {"query": args.query, "filters": "type:movie"}

if args.year:
    query_string["filters"] += f" AND year:{args.year}"

tvdb = requests.post(
    "https://tvshowtime-dsn.algolia.net/1/indexes/TVDB/query",
    params={
        "x-algolia-application-id": "tvshowtime",
        "x-algolia-api-key": "c9d5ec1316cec12f093754c69dd879d3",
    },
    json={"params": urllib.parse.urlencode(query_string)},
)

tvdata = tvdb.json()

if not tvdata["nbHits"] > 0:
    print("Can't find query on TheTVDB")
    exit(1)

eng = tvdata["hits"][0]["name"]

# Fall back to the raw query when the entry has no French translation.
fra = (
    tvdata["hits"][0]["translations"]["fra"]
    if "fra" in tvdata["hits"][0]["translations"]
    else args.query
)
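
# Try the raw query first, then the French and English titles, preferring
# multi-language releases over single-language ones. If a year was given, a
# second pass repeats the searches without the year appended to the name.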
search_ygg(args.query, True, True)
search_ygg(fra, True, True)
search_ygg(eng, True, True)
search_ygg(args.query, False, True)
search_ygg(fra, False, True)
search_ygg(eng, False, True)

if args.year:
    search_ygg(args.query, True, False)
    search_ygg(fra, True, False)
    search_ygg(eng, True, False)
    search_ygg(args.query, False, False)
    search_ygg(fra, False, False)
    search_ygg(eng, False, False)

print("No results :(")