This repository has been archived on 2023-10-01. You can view files and clone it, but cannot push or open issues or pull requests.
PyNyaaTa/pynyaata2/bridge/animeultime.py
2023-06-04 17:53:29 +02:00

134 lines
4.3 KiB
Python

from datetime import datetime, timedelta
from typing import List
from bs4 import BeautifulSoup
from pydantic import HttpUrl, parse_obj_as
from pynyaata2.cache import cache_data
from pynyaata2.filters import filter_data
from pynyaata2.types import Bridge, Color, RemoteFile, async_wrap
import requests
# French month names in calendar order. Anime-Ultime renders its history
# dates in French, so MONTHS.index(name) + 1 recovers the month number.
MONTHS = (
    "Janvier Février Mars Avril Mai Juin "
    "Juillet Août Septembre Octobre Novembre Décembre"
).split()
class AnimeUltime(Bridge):
    """Bridge for the Anime-Ultime tracker (anime-ultime.net).

    The site has no JSON API: non-empty queries go through an HTML POST
    search form, while empty queries browse the month-by-month history
    pages. Results are scraped from the returned HTML with BeautifulSoup.
    """

    color = Color.WARNING
    title = "Anime-Ultime"
    base_url = parse_obj_as(HttpUrl, "http://www.anime-ultime.net")
    favicon = parse_obj_as(HttpUrl, f"{base_url}/favicon.ico")

    # Upper bound (seconds) for each HTTP request; without it a stalled
    # server would block the bridge indefinitely.
    REQUEST_TIMEOUT = 30

    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
        """Build the remote URL for a search (``query`` given) or for a
        history page (empty ``query``).

        History pagination is month-based: page ``n`` maps to roughly
        ``n - 1`` months before now (approximated as 365/12 days each),
        encoded as ``MMYYYY`` in the path.
        """
        try:
            # One page per month, approximated as 365/12 days per step.
            page_date = datetime.now() - timedelta((page - 1) * 365 / 12)
        except OverflowError:
            # Absurdly large page numbers underflow datetime: clamp to epoch.
            page_date = datetime.fromtimestamp(0)

        return parse_obj_as(
            HttpUrl,
            (
                f"{self.base_url}/"
                f"{'search' if query else 'history'}-0-1/"
                f"{page_date.strftime('%m%Y') if not query else ''}"
            ),
        )

    @async_wrap
    @cache_data
    @filter_data
    def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
        """Scrape Anime-Ultime and return the matching remote files.

        Three page layouts can come back and are dispatched on sniffed
        markers: a search-result table, a history listing, or a single
        file page (when the search matched exactly one release).

        Raises:
            requests.HTTPError: if the server answers with a non-200 status.
        """
        # The search form is a POST; history browsing is a plain GET.
        response = (
            requests.post(
                self.search_url(query, page),
                {"search": query},
                timeout=self.REQUEST_TIMEOUT,
            )
            if query
            else requests.get(
                self.search_url(query, page), timeout=self.REQUEST_TIMEOUT
            )
        )

        if response.status_code != 200:
            raise requests.HTTPError(response)

        html = BeautifulSoup(response.content, "html.parser")
        title = html.select_one("div.title")
        titre = html.select_one("div.titre")
        history = html.select_one("h1")
        player = html.select_one("div.AUVideoPlayer")
        tables = html.select("table.jtable")

        if title and "Recherche" in title.get_text():
            return self._parse_search_results(html)
        if history and "Historique" in history.get_text():
            return self._parse_history(html, tables)
        if player and titre and history:
            return self._parse_file_page(player, titre, history, tables)
        return []

    def _parse_search_results(self, html: BeautifulSoup) -> List[RemoteFile]:
        """Parse the rows of a search-result table into RemoteFile entries."""
        torrents: List[RemoteFile] = []

        for i, tr in enumerate(html.select("table.jtable tr")):
            if not i:
                continue  # first row is the table header
            tds = tr.find_all("td")
            if tds:
                torrents.append(
                    RemoteFile(
                        bridge=self.__class__.__name__,
                        # href looks like "file-0-1/<id>-..."; second path
                        # segment up to the first dash is the numeric id.
                        id=tds[0].a["href"].split("/")[1].split("-")[0],
                        category=tds[1].get_text(),
                        name=tds[0].get_text(),
                        link=f"{self.base_url}/{tds[0].a['href']}",
                    )
                )

        return torrents

    def _parse_history(self, html, tables) -> List[RemoteFile]:
        """Parse a history page: one <h3> date heading per release table."""
        torrents: List[RemoteFile] = []
        h3s = html.find_all("h3")  # i-th heading dates the i-th jtable

        for i, table in enumerate(tables):
            for j, tr in enumerate(table.find_all("tr")):
                if not j:
                    continue  # skip the header row of each table
                tds = tr.find_all("td")
                if tds[0].a["href"] != "#":
                    # Heading is a French date; after dropping the last 3
                    # chars, tokens [1]/[2]/[3] are presumably
                    # day / month-name / year — TODO confirm on live markup.
                    date = h3s[i].get_text()[:-3].split(" ")
                    torrents.append(
                        RemoteFile(
                            bridge=self.__class__.__name__,
                            id=tds[0].a["href"].split("/")[-2],
                            category=tds[4].get_text(),
                            name=tds[0].get_text(),
                            link=f"{self.base_url}/{tds[0].a['href']}",
                            date=datetime(
                                int(date[3]),
                                MONTHS.index(date[2]) + 1,
                                int(date[1]),
                            ),
                        )
                    )

        return torrents

    def _parse_file_page(self, player, titre, history, tables) -> List[RemoteFile]:
        """Parse a single-file page (search resolved to exactly one release)."""
        return [
            RemoteFile(
                bridge=self.__class__.__name__,
                id=player["data-serie"],
                # "Title (extra info)" -> keep only the part before "(".
                category=titre.get_text().split("(")[0].strip(),
                name=history.get_text(),
                link=f"{self.base_url}/file-0-1/{player['data-serie']}",
                # Second cell of the second row of the first info table
                # holds the release date as dd/mm/yy.
                date=datetime.strptime(
                    tables[0].find_all("tr")[1].find_all("td")[1].get_text(),
                    "%d/%m/%y",
                ),
            )
        ]