2020-04-24 19:01:44 +00:00
|
|
|
from datetime import datetime, timedelta
|
|
|
|
|
|
|
|
from bs4 import BeautifulSoup
|
|
|
|
|
2022-09-01 18:52:02 +00:00
|
|
|
from .core import ConnectorCache, ConnectorCore, ConnectorReturn, curl_content
|
|
|
|
from ..utils import link_exist_in_db, parse_date
|
2020-04-24 19:01:44 +00:00
|
|
|
|
|
|
|
|
|
|
|
class AnimeUltime(ConnectorCore):
    """Connector for the Anime-Ultime site (search and recent-release history)."""

    color = "is-warning"
    title = "Anime-Ultime"
    favicon = "animeultime.png"
    base_url = "http://www.anime-ultime.net"
    # Pages are cheap to fetch and parse, so this connector is marked "light".
    is_light = True

    def get_full_search_url(self):
        """Return the URL to query, depending on the requested return type.

        Search mode hits ``<base>/search-0-1/``; history mode hits
        ``<base>/history-0-1/<MMYYYY>`` where the month is derived from
        ``self.page`` (each page goes roughly one month further back).
        """
        from_date = ""
        sort_type = "search"

        if self.return_type is ConnectorReturn.HISTORY:
            try:
                # One history page ~ one month back in time (365/12 days).
                page_date = datetime.now() - timedelta((int(self.page) - 1) * 365 / 12)
            except OverflowError:
                # Paging further back than datetime supports: clamp to the epoch.
                page_date = datetime.fromtimestamp(0)
            from_date = page_date.strftime("%m%Y")
            sort_type = "history"

        return "%s/%s-0-1/%s" % (self.base_url, sort_type, from_date)

    @ConnectorCache.cache_data
    def search(self):
        """Run the search query and append matches to ``self.data``.

        Anime-Ultime answers a search either with a result table (several
        matches) or by redirecting straight to the single matching show's
        page (detected via its video player div). Both layouts are handled.
        """
        response = curl_content(self.get_full_search_url(), {"search": self.query})

        if response["http_code"] == 200:
            html = BeautifulSoup(response["output"], "html.parser")
            title = html.select("div.title")
            player = html.select("div.AUVideoPlayer")

            if len(title) > 0 and "Recherche" in title[0].get_text():
                # Result-list page: one table row per matching show.
                trs = html.select("table.jtable tr")

                for i, tr in enumerate(trs):
                    if not i:
                        # First row is the table header.
                        continue

                    tds = tr.find_all("td")

                    if len(tds) < 2:
                        continue

                    url = tds[0].a
                    if url is None:
                        # Malformed row without a link: skip instead of crashing.
                        continue
                    href = "%s/%s" % (self.base_url, url["href"])

                    # Avoid duplicate entries for the same show.
                    if not any(href == d["href"] for d in self.data):
                        self.data.append(
                            {
                                "vf": self.is_vf(),
                                "href": href,
                                "name": url.get_text(),
                                "type": tds[1].get_text(),
                                "class": self.color if link_exist_in_db(href) else "",
                            }
                        )
            elif len(player) > 0:
                # Single-match page: the site went straight to the show.
                name = html.select("h1")
                ani_type = html.select("div.titre")
                href = "%s/file-0-1/%s" % (self.base_url, player[0]["data-serie"])

                # Guard against an unexpected page layout where the title or
                # type heading is missing (previously raised IndexError).
                if name and ani_type:
                    self.data.append(
                        {
                            "vf": self.is_vf(),
                            "href": href,
                            "name": name[0].get_text(),
                            "type": ani_type[0].get_text().replace(":", ""),
                            "class": self.color if link_exist_in_db(href) else "",
                        }
                    )

            self.on_error = False

    @ConnectorCache.cache_data
    def get_history(self):
        """Fetch the recent-release history page and fill ``self.data``.

        The page contains one ``<h3>`` day heading followed by one
        ``table.jtable`` of releases per day; headings and tables are
        paired by index.
        """
        response = curl_content(self.get_full_search_url())

        if response["http_code"] == 200:
            html = BeautifulSoup(response["output"], "html.parser")
            tables = html.select("table.jtable")
            h3s = html.find_all("h3")

            for i, table in enumerate(tables):
                for j, tr in enumerate(table.find_all("tr")):
                    if not j:
                        # First row of each day table is the header.
                        continue

                    tds = tr.find_all("td")

                    # The release type lives in the fifth cell and the link in
                    # the first; skip malformed/short rows instead of raising
                    # IndexError/TypeError (mirrors the guard in search()).
                    if len(tds) < 5 or tds[0].a is None:
                        continue

                    link = tds[0].a
                    href = "%s/%s" % (self.base_url, link["href"])

                    self.data.append(
                        {
                            "vf": self.is_vf(),
                            "href": href,
                            "name": link.get_text(),
                            "type": tds[4].get_text(),
                            # The day heading carries trailing decoration that
                            # parse_date does not expect, hence the [:-3] trim
                            # (presumably a " : " suffix — TODO confirm on site).
                            "date": parse_date(h3s[i].string[:-3], "%A %d %B %Y"),
                            "class": self.color if link_exist_in_db(href) else "",
                        }
                    )

            self.on_error = False

    @ConnectorCache.cache_data
    def is_vf(self, url=""):
        """Anime-Ultime only hosts subtitled releases, so nothing is ever VF."""
        return False
|