from datetime import datetime
from os import getenv
from typing import List
from urllib import parse

import requests
from bs4 import BeautifulSoup
from pydantic import HttpUrl, parse_obj_as

from pynyaata2.cache import cache_data
from pynyaata2.filters import filter_data
from pynyaata2.types import Bridge, Color, RemoteFile, async_wrap

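# Keywords that mark a French release (VF, VOSTFR, multi-audio, "fre");
# the defaults can be overridden through the VF_WORDS environment variable.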
VF_WORDS = getenv("VF_WORDS", "vf,vostfr,multi,fre").split(",")


class Nyaa(Bridge):
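    """Bridge that scrapes torrent listings from the Nyaa.si tracker."""
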
    color = Color.INFO
    title = "Nyaa"
    base_url = parse_obj_as(HttpUrl, "https://nyaa.si")
    favicon = parse_obj_as(HttpUrl, f"{base_url}/static/favicon.png")

    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
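        # OR-combine the query with every French marker so one request matches
        # any of them: "(<query> vf)|(<query> vostfr)|(<query> multi)|...".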
        to_query = "|".join(map(lambda word: f"({query} {word})", VF_WORDS))
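        # f=0 disables the trust filter and c=1_3 appears to target Nyaa's
        # "Anime - Non-English-translated" category; searches sort by size,
        # bare listings by id (newest first), both descending.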
        params = parse.urlencode(
            {
                "f": 0,
                "c": "1_3",
                "q": to_query,
                "s": "size" if query else "id",
                "o": "desc",
                "p": page,
            }
        )

        return parse_obj_as(HttpUrl, f"{self.base_url}?{params}")

    @async_wrap
    @cache_data
    @filter_data
    def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
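        """Scrape one page of results into a list of RemoteFile entries."""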
        response = requests.get(self.search_url(query, page))

        if response.status_code != 200:
            raise requests.HTTPError(response)

        torrents: List[RemoteFile] = []
        html = BeautifulSoup(response.content, "html.parser")
        trs = html.select("table.torrent-list tr")

        for i, tr in enumerate(trs):
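            # The first <tr> is the table header; skip it.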
            if not i:
                continue

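            # Row cells: [0] category, [1] name plus an optional comments link,
            # [2] torrent/magnet links, [3] size, [4] date, [5] seeders,
            # [6] leechers, [7] completed downloads.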
            tds = tr.find_all("td")
            urls = tds[1].find_all("a")
            links = tds[2].find_all("a")

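            # The last-but-one pagination <li> holds the page count. Decompose
            # the active-page marker <span> so get_text() yields only digits;
            # once removed, select_one() returns None, hence the guard.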
            nb_pages = html.select("ul.pagination li")[-2]
            current = nb_pages.select_one("span")
            if current:
                current.decompose()

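            # When a torrent has comments, tds[1] holds two links: the comment
            # counter first, then the torrent title.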
            torrents.append(
                RemoteFile(
                    bridge=self.__class__.__name__,
                    id=urls[1 if len(urls) > 1 else 0]["href"].split("/")[-1],
                    category=tds[0].a["title"],
                    color=Color[tr["class"][0].upper()],
                    name=urls[1 if len(urls) > 1 else 0].get_text(),
                    link=f"{self.base_url}{urls[1 if len(urls) > 1 else 0]['href']}",
                    comment=urls[0].get_text() if len(urls) > 1 else 0,
                    comment_url=f"{self.base_url}{urls[0]['href']}",
                    magnet=links[1]["href"],
                    torrent=f"{self.base_url}{links[0]['href']}",
                    size=tds[3].get_text(),
                    date=datetime.fromtimestamp(int(tds[4]["data-timestamp"])),
                    seeds=tds[5].get_text(),
                    leechs=tds[6].get_text(),
                    downloads=tds[7].get_text(),
                    nb_pages=nb_pages.get_text(),
                )
            )

        return torrents


class EraiRaws(Nyaa):
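    """The same scraper pointed at Erai-raws' uploads on Nyaa.si."""
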
    title = "Erai-raws"

    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
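        # Erai-raws uploads sit in the English-translated category (c=1_2);
        # appending "fre" presumably narrows results to releases that also
        # carry French subtitles.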
        params = parse.urlencode(
            {
                "f": 0,
                "c": "1_2",
                "q": f"{query} fre",
                "s": "size" if query else "id",
                "o": "desc",
                "p": page,
            }
        )

        return parse_obj_as(HttpUrl, f"{self.base_url}/user/Erai-raws?{params}")