Add 2hdp.py

parent 5536d2e0c7
commit 4a4742b2f9

commands/2hdp.py (new executable file, 100 additions)
@@ -0,0 +1,100 @@
#!/usr/bin/env python3
import argparse
import csv
import re
import subprocess
import sys

import bs4
import requests


parser = argparse.ArgumentParser()
parser.add_argument("-s", "--season", type=int)
args = parser.parse_args()

out = csv.DictWriter(sys.stdout, ["EP", "NAME", "URL"])
out.writeheader()
sys.stdout.flush()
page = 1

while True:
    page_req = requests.get(
        "https://www.2hdp.fr/", params={"season": args.season, "page": page}
    )
    page_html = bs4.BeautifulSoup(page_req.text, "html.parser")
    episodes = page_html.select("a.mx-auto")

    if len(episodes) == 0:
        break

    for episode in episodes:
        episode_req = requests.get(str(episode["href"]))
        episode_html = bs4.BeautifulSoup(episode_req.text, "html.parser")

        raw_title = episode_html.select_one("h1.inline-block")
        if not raw_title:
            continue
        title = raw_title.get_text().strip()

        raw_year = episode_html.select_one("div.block")
        if not raw_year:
            continue
        re_year = re.search(r"\((\d*)\)", raw_year.get_text())
        if not re_year:
            continue
        year = re_year.group(1)

        raw_ep = episode_html.select_one("strong.flex-shrink-0")
        if not raw_ep:
            continue
        ep = raw_ep.get_text().strip()

        try:
            output = subprocess.run(
                [
                    "python",
                    "pygg.py",
                    "-u",
                    "winks",
                    "-u",
                    "mhdgz",
                    "-y",
                    year,
                    title,
                ],
                check=True,
                capture_output=True,
            )
            out.writerow(
                {"EP": ep, "NAME": title, "URL": output.stdout.decode().strip()}
            )
            sys.stdout.flush()
            continue
        except Exception:
            pass

        try:
            output = subprocess.run(
                [
                    "python",
                    "pygg.py",
                    "-y",
                    year,
                    title,
                ],
                check=True,
                capture_output=True,
            )
            out.writerow(
                {"EP": ep, "NAME": title, "URL": output.stdout.decode().strip()}
            )
            sys.stdout.flush()
            continue
        except Exception:
            pass

        out.writerow({"EP": ep, "NAME": title, "URL": "No results"})
        sys.stdout.flush()

    page += 1
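
Note: because the script writes CSV to stdout and flushes after every row, its
output can be consumed while it is still scraping. A minimal sketch of a piped
reader, assuming the script is run from the repository root (the season number
"3" is an arbitrary example, not part of the commit):

    import csv
    import subprocess

    # Launch 2hdp.py and stream its CSV output row by row as it is flushed.
    proc = subprocess.Popen(
        ["python", "commands/2hdp.py", "-s", "3"],
        stdout=subprocess.PIPE,
        text=True,
    )
    for row in csv.DictReader(proc.stdout):
        print(row["EP"], row["NAME"], row["URL"])
    proc.wait()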
commands/conan-eps.py (normal file → executable file, 2 additions)

@@ -102,6 +102,7 @@ tables = soup.select("table.wikitable")
 
 out = csv.DictWriter(sys.stdout, CSV_COLUMNS)
 out.writeheader()
+sys.stdout.flush()
 
 for season, table in enumerate(tables):
     if not season:
@@ -139,3 +140,4 @@ for season, table in enumerate(tables):
                 "Date": tds[4].text.strip(),
             }
         )
+    sys.stdout.flush()
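
Note: the added sys.stdout.flush() calls matter because stdout is
block-buffered when piped, so without them a downstream reader sees nothing
until a buffer fills. A standalone illustration of the pattern (not part of
the commit):

    import csv
    import sys

    out = csv.DictWriter(sys.stdout, ["EP", "NAME"])
    out.writeheader()
    sys.stdout.flush()  # the header reaches a piped consumer immediately
    out.writerow({"EP": "1", "NAME": "example"})
    sys.stdout.flush()  # ...and so does each row as it is produced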
commands/paimon.py (normal file → executable file, 12 changed lines)

@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
-import argparse
 import csv
 import json
 import os
+import sys
 import tempfile
 import urllib.request
 import zipfile
@@ -28,12 +28,6 @@ def compute_base(data, type: str, base: float = 0.0):
     return round(base + boost)
 
 
-parser = argparse.ArgumentParser()
-parser.add_argument(
-    "output", help="Path of the output CSV file", type=argparse.FileType("w")
-)
-args = parser.parse_args()
-
 main, message = urllib.request.urlretrieve(
     "https://github.com/MadeBaruna/paimon-moe/archive/refs/heads/main.zip"
 )
@@ -45,11 +39,12 @@ with zipfile.ZipFile(main) as zip:
     zip.extractall(CHAR_DATA_TMP)
 
 out = csv.DictWriter(
-    args.output,
+    sys.stdout,
     ["ID", "PV", "ATQ", "DEF", "Taux CRIT", "DGT CRIT", "EM", "Energy Cost"],
 )
 
 out.writeheader()
+sys.stdout.flush()
 
 for file in sorted(os.listdir(CHAR_DATA_OUT)):
     with open(os.path.join(CHAR_DATA_OUT, file)) as character:
@@ -67,3 +62,4 @@ for file in sorted(os.listdir(CHAR_DATA_OUT)):
                 - compute_base(data, "er"),
             }
         )
+    sys.stdout.flush()
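
Note: with the "output" argument removed, paimon.py now writes to stdout like
the other commands, so the old write-to-file behaviour is recovered with a
redirect. A sketch of the equivalent invocation (assumed, not shown in the
commit):

    import subprocess

    # Redirecting stdout replaces the removed "output" file argument.
    with open("paimon.csv", "w") as f:
        subprocess.run(["python", "commands/paimon.py"], stdout=f, check=True)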