from datetime import datetime, timedelta
from typing import List

import requests
from bs4 import BeautifulSoup
from pydantic import HttpUrl, parse_obj_as

from pynyaata2.cache import cache_data
from pynyaata2.filters import filter_data
from pynyaata2.types import Bridge, Color, RemoteFile, async_wrap
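
# Bridge for Anime-Ultime (anime-ultime.net), a French fansub archive. The
# scraper below handles the three page layouts the site can return: search
# results ("Recherche"), the monthly history ("Historique"), and a single
# file page with an embedded player.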

# French month names as they appear in the history page headings; used to map
# a heading such as "23 Janvier 2023" back to a datetime (see search below).
MONTHS = [
    "Janvier",
    "Février",
    "Mars",
    "Avril",
    "Mai",
    "Juin",
    "Juillet",
    "Août",
    "Septembre",
    "Octobre",
    "Novembre",
    "Décembre",
]
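
# For example, MONTHS.index("Août") + 1 == 8, matching datetime's 1-based months.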


class AnimeUltime(Bridge):
    color = Color.WARNING
    title = "Anime-Ultime"
    base_url = parse_obj_as(HttpUrl, "http://www.anime-ultime.net")
    favicon = parse_obj_as(HttpUrl, f"{base_url}/favicon.ico")
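
    # History pages are addressed by month (MMYYYY), so paging is approximated
    # by stepping back roughly one month (365 / 12 days) per page; search
    # requests ignore the page number entirely.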
    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
        try:
            page_date = datetime.now() - timedelta(days=(page - 1) * 365 / 12)
        except OverflowError:
            # An absurdly large page number would overflow, so clamp to epoch.
            page_date = datetime.fromtimestamp(0)

        return parse_obj_as(
            HttpUrl,
            (
                f"{self.base_url}/"
                f"{'search' if query else 'history'}-0-1/"
                f"{page_date.strftime('%m%Y') if not query else ''}"
            ),
        )
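
    # A rough sketch of the resulting URLs (actual shape depends on the site):
    #   search_url("naruto")  -> http://www.anime-ultime.net/search-0-1/
    #   search_url(page=2)    -> http://www.anime-ultime.net/history-0-1/MMYYYY
    #                            (MMYYYY being roughly one month back)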

    @async_wrap
    @cache_data
    @filter_data
    def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
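        # The search page expects a POST with the query, while the monthly
        # history page is fetched with a plain GET.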
        response = (
            requests.post(self.search_url(query, page), {"search": query})
            if query
            else requests.get(self.search_url(query, page))
        )

        if response.status_code != 200:
            raise requests.HTTPError(response)

        torrents: List[RemoteFile] = []
        html = BeautifulSoup(response.content, "html.parser")

        # Anchors used to tell the three page layouts apart.
        title = html.select_one("div.title")
        titre = html.select_one("div.titre")
        history = html.select_one("h1")
        player = html.select_one("div.AUVideoPlayer")
        tables = html.select("table.jtable")

        if title and "Recherche" in title.get_text():
            # Search results: one jtable row per file, first row is the header.
            trs = html.select("table.jtable tr")

            for i, tr in enumerate(trs):
                if not i:
                    continue

                tds = tr.find_all("td")

                if tds:
                    torrents.append(
                        RemoteFile(
                            bridge=self.__class__.__name__,
                            # href looks like "<type>/<id>-<slug>"; keep the id.
                            id=tds[0].a["href"].split("/")[1].split("-")[0],
                            category=tds[1].get_text(),
                            name=tds[0].get_text(),
                            link=f"{self.base_url}/{tds[0].a['href']}",
                        )
                    )
        elif history and "Historique" in history.get_text():
            # History: one jtable per day, each introduced by an <h3> heading
            # that carries the date.
            h3s = html.find_all("h3")

            for i, table in enumerate(tables):
                for j, tr in enumerate(table.find_all("tr")):
                    if not j:
                        continue

                    tds = tr.find_all("td")

                    # Rows whose link is "#" are placeholders without a file page.
                    if tds[0].a["href"] != "#":
                        # A heading like "Lundi 23 Janvier 2023 : " yields
                        # [weekday, day, month, year] once the suffix is stripped.
                        date = h3s[i].get_text()[:-3].split(" ")

                        torrents.append(
                            RemoteFile(
                                bridge=self.__class__.__name__,
                                id=tds[0].a["href"].split("/")[-2],
                                category=tds[4].get_text(),
                                name=tds[0].get_text(),
                                link=f"{self.base_url}/{tds[0].a['href']}",
                                date=datetime(
                                    int(date[3]),
                                    MONTHS.index(date[2]) + 1,
                                    int(date[1]),
                                ),
                            )
                        )
        elif player and titre and history:
            # Single file page with an embedded player: build one entry from
            # the page heading and the first info table.
            torrents.append(
                RemoteFile(
                    bridge=self.__class__.__name__,
                    id=player["data-serie"],
                    category=titre.get_text().split("(")[0].strip(),
                    name=history.get_text(),
                    link=f"{self.base_url}/file-0-1/{player['data-serie']}",
                    date=datetime.strptime(
                        tables[0].find_all("tr")[1].find_all("td")[1].get_text(),
                        "%d/%m/%y",
                    ),
                )
            )

        return torrents
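

# A minimal usage sketch (assuming async_wrap exposes search as an awaitable
# coroutine, as its name suggests):
#
#     import asyncio
#
#     bridge = AnimeUltime()
#     for torrent in asyncio.run(bridge.search("one piece")):
#         print(torrent.name, torrent.link)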