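"""Anime-Ultime bridge for PyNyaaTa."""
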
from datetime import datetime, timedelta
from typing import List

import requests
from bs4 import BeautifulSoup
from dateparser import parse
from pydantic import HttpUrl, parse_obj_as

from pynyaata.cache import cache_data
from pynyaata.filters import filter_data
from pynyaata.types import Bridge, Color, RemoteFile, log_async


class AnimeUltime(Bridge):
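    """Bridge that scrapes the French streaming site Anime-Ultime."""
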
    color = Color.WARNING
    title = "Anime-Ultime"
    # Trailing slash matters: relative hrefs are appended directly below.
    base_url = parse_obj_as(HttpUrl, "http://www.anime-ultime.net/")
    favicon = parse_obj_as(HttpUrl, "http://www.anime-ultime.net/favicon.ico")

    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
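        """Return the URL to fetch for ``query`` and ``page``.

        With a query, the search endpoint is used; otherwise the URL
        points at the monthly history listing.
        """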
        try:
            # One page back equals roughly one month (365 / 12 days).
            page_date = datetime.now() - timedelta(days=(page - 1) * 365 / 12)
        except OverflowError:
            # Fall back to the Unix epoch when the page offset overflows.
            page_date = datetime.fromtimestamp(0)

        return parse_obj_as(
            HttpUrl,
            (
                f"{self.base_url}"
                f"{'search' if query else 'history'}-0-1/"
                # Only the history listing is paginated, by month.
                f"{'' if query else page_date.strftime('%m%Y')}"
            ),
        )

    @log_async
    @cache_data
    @filter_data
    async def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
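        """Search Anime-Ultime, or list recent releases when no query is given."""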
        # The search form expects a POST with the query field; the
        # history listing is a plain GET.
        response = (
            requests.post(self.search_url(query, page), {"search": query})
            if query
            else requests.get(self.search_url(query, page))
        )

        if response.status_code != 200:
            raise requests.HTTPError(response)

        torrents: List[RemoteFile] = []
        html = BeautifulSoup(response.content, "html.parser")

        # Landmark nodes that tell the three page layouts apart:
        # search results, monthly history, or a single file page.
        title = html.select_one("div.title")
        history = html.select_one("h1")
        player = html.select_one("div.AUVideoPlayer")
        tables = html.select("table.jtable")

        if title and "Recherche" in title.get_text():
            # Search results: a single jtable whose first row is the header.
            trs = html.select("table.jtable tr")

            for i, tr in enumerate(trs):
                if not i:
                    # Skip the header row.
                    continue

                tds = tr.find_all("td")

                torrents.append(
                    RemoteFile(
                        bridge=self.__class__.__name__,
                        # The file id is the leading number of the href slug.
                        id=tds[0].a["href"].split("/")[1].split("-")[0],
                        category=tds[1].get_text(),
                        name=tds[0].get_text(),
                        link=f"{self.base_url}{tds[0].a['href']}",
                    )
                )
        elif history and "Historique" in history.get_text():
            # History: one jtable per day, each preceded by an h3 heading
            # carrying that day's date in French.
            h3s = html.find_all("h3")

            for i, table in enumerate(tables):
                for j, tr in enumerate(table.find_all("tr")):
                    if not j:
                        # Skip the header row.
                        continue

                    tds = tr.find_all("td")

                    torrents.append(
                        RemoteFile(
                            bridge=self.__class__.__name__,
                            id=tds[0].a["href"].split("/")[-2],
                            category=tds[4].get_text(),
                            name=tds[0].get_text(),
                            link=f"{self.base_url}{tds[0].a['href']}",
                            # Drop the trailing " : " before parsing the date.
                            date=parse(h3s[i].get_text()[:-3], ["%A %d %B %Y"]),
                        )
                    )
        elif player and title and history and tables:
            # Single file page: the embedded player carries the series id.
            torrents.append(
                RemoteFile(
                    bridge=self.__class__.__name__,
                    id=player["data-serie"],
                    category=title.get_text(),
                    name=history.get_text(),
                    link=f"{self.base_url}file-0-1/{player['data-serie']}",
                    date=tables[0].find_all("tr")[1].find_all("td")[1].get_text(),
                )
            )

        return torrents
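
# Minimal usage sketch, assuming an asyncio event loop and the decorator
# behaviour above (the query string is illustrative):
#
#     import asyncio
#
#     results = asyncio.run(AnimeUltime().search("one piece"))
#     for remote in results:
#         print(remote.name, remote.link)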