# PyNyaaTa/pynyaata/connectors/animeultime.py

from datetime import datetime, timedelta
from bs4 import BeautifulSoup
from .core import ConnectorCache, ConnectorCore, ConnectorReturn, curl_content
from ..utils import link_exist_in_db, parse_date


class AnimeUltime(ConnectorCore):
    color = 'is-warning'
    title = 'Anime-Ultime'
    favicon = 'animeultime.png'
    base_url = 'http://www.anime-ultime.net'
    is_light = True

    def get_full_search_url(self):
        from_date = ''
        sort_type = 'search'

        if self.return_type is ConnectorReturn.HISTORY:
            try:
                # Step back roughly one month (365 / 12 days) per history page.
                page_date = datetime.now() - timedelta((int(self.page) - 1) * 365 / 12)
            except OverflowError:
                # Page number too large to represent: clamp to the Unix epoch.
                page_date = datetime.fromtimestamp(0)
            from_date = page_date.strftime('%m%Y')
            sort_type = 'history'

        return '%s/%s-0-1/%s' % (self.base_url, sort_type, from_date)
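
    # For illustration, the URLs this builds look like the following (derived
    # from the format string above; the history example assumes a request made
    # in April 2020 for page 1):
    #   search:  http://www.anime-ultime.net/search-0-1/
    #   history: http://www.anime-ultime.net/history-0-1/042020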

    @ConnectorCache.cache_data
    def search(self):
        response = curl_content(self.get_full_search_url(), {
            'search': self.query
        })

        if response['http_code'] == 200:
            html = BeautifulSoup(response['output'], 'html.parser')
            title = html.select('div.title')
            player = html.select('div.AUVideoPlayer')

            if len(title) > 0 and 'Recherche' in title[0].get_text():
                # Results page ('Recherche' is French for 'search'):
                # parse each row of the search results table.
                trs = html.select('table.jtable tr')

                for i, tr in enumerate(trs):
                    if not i:
                        # Skip the table header row.
                        continue

                    tds = tr.findAll('td')

                    if len(tds) < 2:
                        continue

                    url = tds[0].a
                    href = '%s/%s' % (self.base_url, url['href'])

                    # Avoid duplicate entries for the same link.
                    if not any(href == d['href'] for d in self.data):
                        self.data.append({
                            'vf': self.is_vf(),
                            'href': href,
                            'name': url.get_text(),
                            'type': tds[1].get_text(),
                            'class': self.color if link_exist_in_db(href) else ''
                        })
            elif len(player) > 0:
                # Single match: the site redirected straight to the player page.
                name = html.select('h1')
                ani_type = html.select('div.titre')
                href = '%s/file-0-1/%s' % (
                    self.base_url,
                    player[0]['data-serie']
                )

                self.data.append({
                    'vf': self.is_vf(),
                    'href': href,
                    'name': name[0].get_text(),
                    'type': ani_type[0].get_text().replace(':', ''),
                    'class': self.color if link_exist_in_db(href) else ''
                })

            self.on_error = False
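
    # For illustration, a successful search populates self.data with entries
    # shaped like the dict literals above, e.g. (values are hypothetical):
    #   {'vf': False, 'href': 'http://www.anime-ultime.net/file-0-1/123',
    #    'name': 'Example Title', 'type': 'OAV', 'class': ''}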

    @ConnectorCache.cache_data
    def get_history(self):
        response = curl_content(self.get_full_search_url())

        if response['http_code'] == 200:
            html = BeautifulSoup(response['output'], 'html.parser')
            tables = html.select('table.jtable')
            h3s = html.findAll('h3')

            # The history page holds one table per day, each preceded by an
            # <h3> heading carrying the matching date.
            for i, table in enumerate(tables):
                for j, tr in enumerate(table.findAll('tr')):
                    if not j:
                        # Skip the table header row.
                        continue

                    tds = tr.findAll('td')
                    link = tds[0].a
                    href = '%s/%s' % (self.base_url, link['href'])

                    self.data.append({
                        'vf': self.is_vf(),
                        'href': href,
                        'name': link.get_text(),
                        'type': tds[4].get_text(),
                        # Trim the heading's trailing characters before parsing.
                        'date': parse_date(h3s[i].string[:-3], '%A %d %B %Y'),
                        'class': self.color if link_exist_in_db(href) else ''
                    })

            self.on_error = False

    @ConnectorCache.cache_data
    def is_vf(self, url=''):
        # This connector never reports French-dubbed (VF) releases.
        return False
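
# --- Usage sketch (not part of the original module) ---
# ConnectorCore's exact constructor lives in .core; assuming it accepts the
# query string, a minimal driver would look like:
#
#     connector = AnimeUltime('one piece')
#     connector.search()  # cached via ConnectorCache.cache_data
#     for entry in connector.data:
#         print(entry['name'], entry['href'])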