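"""Nyaa connector: searches https://nyaa.si for French-tagged (VF/VOSTFR/multi) releases."""
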
from bs4 import BeautifulSoup

from .core import ConnectorCache, ConnectorCore, ConnectorReturn, curl_content
from ..utils import check_blacklist_words, check_if_vf, link_exist_in_db


class Nyaa(ConnectorCore):
    color = 'is-link'
    title = 'Nyaa'
    favicon = 'nyaa.png'
    base_url = 'https://nyaa.si'
    is_light = False

    def get_full_search_url(self):
        # Sort by size for regular searches, by id (newest first) for history
        sort_type = 'size'
        if self.return_type is ConnectorReturn.HISTORY:
            sort_type = 'id'

        # Combine the query with each common French-release tag (vf, vostfr, multi, french)
        to_query = '(%s vf)|(%s vostfr)|(%s multi)|(%s french)' % (
            self.query,
            self.query,
            self.query,
            self.query
        )
        return '%s/?f=0&c=1_3&s=%s&o=desc&q=%s&p=%s' % (self.base_url, sort_type, to_query, self.page)
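
    # Illustrative example (not from the original source): with query 'one piece'
    # and page 1, a regular search builds a URL of the form
    #   https://nyaa.si/?f=0&c=1_3&s=size&o=desc&q=(one piece vf)|(one piece vostfr)|(one piece multi)|(one piece french)&p=1
    # assuming the query string is URL-encoded further down the request path.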

    def get_history(self):
        # History reuses the search scraper; only the sort order differs (see get_full_search_url)
        self.search()

    @ConnectorCache.cache_data
    def search(self):
        response = curl_content(self.get_full_search_url())

        if response['http_code'] == 200:
            html = BeautifulSoup(response['output'], 'html.parser')
            trs = html.select('table.torrent-list tr')
            valid_trs = 0

            for i, tr in enumerate(trs):
                # The first row is the table header
                if not i:
                    continue

                # Column layout: tds[1] name (plus optional comments link), tds[2] download
                # links, tds[3] size, tds[4] date, tds[5] seeders, tds[6] leechers,
                # tds[7] completed downloads
                tds = tr.findAll('td')
                check_downloads = int(tds[7].get_text())
                check_seeds = int(tds[5].get_text())

                # Keep only torrents that have been seeded or downloaded at least once
                if check_downloads or check_seeds:
                    urls = tds[1].findAll('a')

                    # When a comments link is present, the torrent link is the second anchor
                    if len(urls) > 1:
                        url = urls[1]
                        has_comment = True
                    else:
                        url = urls[0]
                        has_comment = False

                    url_safe = url.get_text()

                    if check_blacklist_words(url_safe):
                        continue

                    valid_trs = valid_trs + 1
                    href = self.base_url + url['href']

                    self.data.append({
                        'vf': check_if_vf(url_safe),
                        'href': href,
                        'name': url_safe,
                        'comment': str(urls[0]).replace('/view/', self.base_url + '/view/') if has_comment else '',
                        'link': tds[2].decode_contents().replace('/download/', self.base_url + '/download/'),
                        'size': tds[3].get_text(),
                        'date': tds[4].get_text(),
                        'seeds': check_seeds,
                        'leechs': tds[6].get_text(),
                        'downloads': check_downloads,
                        'class': self.color if link_exist_in_db(href) else 'is-%s' % tr['class'][0]
                    })

            self.on_error = False
            # Assume more pages exist when the page had results but not every row passed the filters
            self.is_more = valid_trs and valid_trs != len(trs) - 1

    @ConnectorCache.cache_data
    def is_vf(self, url):
        # Inspect the torrent's detail page title for a French-release tag
        response = curl_content(url)

        if response['http_code'] == 200:
            html = BeautifulSoup(response['output'], 'html.parser')
            title = html.select('h3.panel-title')
            return check_if_vf(title[0].get_text())

        return False
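
# A minimal usage sketch (hypothetical: the ConnectorCore constructor signature is
# assumed here, not taken from this file):
#
#     connector = Nyaa('horimiya')
#     connector.search()
#     for torrent in connector.data:
#         print(torrent['name'], torrent['seeds'], torrent['href'])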