Compare commits
33 Commits
Author | SHA1 | Date |
---|---|---|
Michel Roux | 82384918be | |
Michel Roux | 1a40f32425 | |
Michel Roux | 30c05d9eac | |
Michel Roux | 1e0f1a76af | |
Michel Roux | 5a1d0da33e | |
Michel Roux | 353ed7fb27 | |
Michel Roux | e4fcc45e7d | |
Michel Roux | 6fd4708b22 | |
Michel Roux | da7f857095 | |
Michel Roux | 811dea7f9c | |
Michel Roux | 8a01893406 | |
Michel Roux | e95e3fd631 | |
Michel Roux | bc5b524948 | |
Michel Roux | 589ea325de | |
Michel Roux | 54a36adebf | |
Michel Roux | ce78aa7e55 | |
Michel Roux | 69b6562121 | |
Michel Roux | 39e9d123b5 | |
Michel Roux | 8ceebb80ae | |
Michel Roux | cbf978c9ea | |
Michel Roux | c5279e26f8 | |
Michel Roux | abfb1bfe7f | |
Michel Roux | 0d8dd65028 | |
Michel Roux | 660f1732ce | |
Michel Roux | 1a70528880 | |
Michel Roux | e6e08cc8a1 | |
Michel Roux | d554b0700f | |
Michel Roux | 561a0754d8 | |
Michel Roux | 933b016917 | |
Michel Roux | dbb7586496 | |
Michel Roux | 910cc4e692 | |
Michel Roux | 434fbeb9fa | |
Michel Roux | 46977c7e38 |
|
@ -1,3 +0,0 @@
|
|||
.idea
|
||||
.venv
|
||||
.db
|
|
@ -1,9 +0,0 @@
|
|||
FLASK_APP=run.py
|
||||
FLASK_ENV=development
|
||||
FLASK_PORT=5000
|
||||
REDIS_SERVER=redis
|
||||
ADMIN_USERNAME=admin
|
||||
ADMIN_PASSWORD=secret
|
||||
REQUESTS_TIMEOUT=5
|
||||
CACHE_TIMEOUT=3600
|
||||
BLACKLIST_WORDS=Chris44,Vol.,[zza],.ssa,.ass,Ref:rain
|
3
.flake8
|
@ -1,2 +1,3 @@
|
|||
[flake8]
|
||||
max-line-length = 120
|
||||
max-line-length = 111
|
||||
per-file-ignores = tests/bridge/*:E501
|
||||
|
|
|
@ -1,55 +0,0 @@
|
|||
name: books
|
||||
on: [push]
|
||||
|
||||
jobs:
|
||||
flake8:
|
||||
runs-on: ubuntu-latest
|
||||
container: python
|
||||
steps:
|
||||
- run: apt-get update
|
||||
- run: apt-get install -y git nodejs
|
||||
- uses: actions/checkout@v4
|
||||
- run: pip install flake8
|
||||
- run: flake8 pynyaata
|
||||
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
container: docker
|
||||
needs: [lint]
|
||||
steps:
|
||||
- run: apk add git nodejs
|
||||
- uses: actions/checkout@v4
|
||||
- uses: docker/metadata-action@v5
|
||||
id: meta
|
||||
with:
|
||||
images: xefir/pynyaata
|
||||
tags: |
|
||||
type=schedule
|
||||
type=ref,event=tag
|
||||
type=ref,event=pr
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: ${{ gitea.ref == 'refs/heads/master' || startsWith(gitea.ref, 'refs/tags') }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
pypi:
|
||||
runs-on: ubuntu-latest
|
||||
container: python
|
||||
needs: [lint]
|
||||
if: ${{ gitea.ref == 'refs/heads/master' || startsWith(gitea.ref, 'refs/tags') }}
|
||||
steps:
|
||||
- run: apt-get update
|
||||
- run: apt-get install -y git nodejs
|
||||
- uses: actions/checkout@v4
|
||||
- run: pip install twine
|
||||
- run: python setup.py sdist
|
||||
- run: twine upload dist/*
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
|
|
@ -1,10 +1,164 @@
|
|||
.idea
|
||||
.venv
|
||||
.vscode
|
||||
.db
|
||||
# https://github.com/github/gitignore/blob/main/Python.gitignore
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
dist
|
||||
build
|
||||
*.egg*
|
||||
__pycache__
|
||||
test.py
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
.idea/
|
||||
|
||||
# Tests
|
||||
mocks/*.html
|
||||
|
|
|
@ -1,7 +0,0 @@
|
|||
FROM python:3.11.5
|
||||
|
||||
COPY pynyaata /app/pynyaata
|
||||
COPY requirements.txt *.py /app/
|
||||
WORKDIR /app
|
||||
RUN pip install -r requirements.txt
|
||||
CMD ["python", "run.py"]
|
13
LICENSE.txt
|
@ -1,13 +0,0 @@
|
|||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
Version 2, December 2004
|
||||
|
||||
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim or modified
|
||||
copies of this license document, and changing it is allowed as long
|
||||
as the name is changed.
|
||||
|
||||
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. You just DO WHAT THE FUCK YOU WANT TO.
|
|
@ -1,6 +0,0 @@
|
|||
include run.py
|
||||
include get404.py
|
||||
include README.md
|
||||
include requirements.txt
|
||||
recursive-include pynyaata/static *
|
||||
recursive-include pynyaata/templates *
|
51
README.md
|
@ -1,51 +0,0 @@
|
|||
# π 😼た
|
||||
> "PyNyaaTa", Xéfir's personal anime torrent search engine
|
||||
|
||||
[![Build Status](https://ci.crystalyx.net/api/badges/Xefir/PyNyaaTa/status.svg)](https://ci.crystalyx.net/Xefir/PyNyaaTa)
|
||||
[![Docker Hub](https://img.shields.io/docker/pulls/xefir/pynyaata)](https://hub.docker.com/r/xefir/pynyaata)
|
||||
|
||||
I'm lazy, and I want to search across several VF and VOSTFR torrents databases in one click.
|
||||
That's the starting point that build this app.
|
||||
At first, it was a crappy PHP project without any good future.
|
||||
After a good rewrite in Python, it's time to show it to the public, and here it is!
|
||||
|
||||
## Installing / Getting started
|
||||
|
||||
### With Docker
|
||||
|
||||
- Install Docker: https://hub.docker.com/search/?type=edition&offering=community
|
||||
- Run `docker run -p 5000 xefir/pynyaata`
|
||||
- The app is accessible at http://localhost:5000
|
||||
|
||||
### Without Docker
|
||||
|
||||
- Install Python 3: https://www.python.org/downloads/
|
||||
- Install Pip: https://pip.pypa.io/en/stable/installing/
|
||||
- Run `pip install pynyaata`
|
||||
- Run `pynyaata`
|
||||
- The app is accessible at http://localhost:5000
|
||||
|
||||
## Features
|
||||
|
||||
* Search on [Nyaa.si](https://nyaa.si/) and [Anime-Ultime](http://www.anime-ultime.net/index-0-1)
|
||||
* Provide useful links to [TheTVDB](https://www.thetvdb.com/) and [Nautiljon](https://www.nautiljon.com/) during a search
|
||||
* Color official and bad links
|
||||
* Add seeded links to a database
|
||||
* Color seeded link on search
|
||||
* Run a batch to list all dead link on database
|
||||
|
||||
## Configuration
|
||||
|
||||
All is managed by environment variables.
|
||||
Please look into the `.env.dist` file to list all possible environment variables.
|
||||
You have to have a running database server to be able to access the admin panel.
|
||||
|
||||
## Links
|
||||
|
||||
- Project homepage: https://nyaa.crystalyx.net/
|
||||
- Source repository: https://git.crystalyx.net/Xefir/PyNyaaTa
|
||||
- Issue tracker: https://git.crystalyx.net/Xefir/PyNyaaTa/issues
|
||||
- My other projects: https://git.crystalyx.net/Xefir
|
||||
- Docker hub: https://hub.docker.com/r/xefir/pynyaata
|
||||
- Donations: https://paypal.me/Xefir
|
||||
|
21
get404.py
|
@ -1,21 +0,0 @@
|
|||
from pynyaata.connectors.core import curl_content
|
||||
from pynyaata.models import AnimeLink
|
||||
|
||||
links = AnimeLink.query.all()
|
||||
|
||||
for link in links:
|
||||
html = curl_content(link.link, debug=False)
|
||||
|
||||
if html['http_code'] != 200 and html['http_code'] != 500:
|
||||
print('(%d) %s %s : %s' % (
|
||||
html['http_code'],
|
||||
link.title.name,
|
||||
link.season,
|
||||
link.link
|
||||
))
|
||||
elif 'darkgray' in str(html['output']):
|
||||
print('(darkgray) %s %s : %s' % (
|
||||
link.title.name,
|
||||
link.season,
|
||||
link.link
|
||||
))
|
|
@ -1,284 +0,0 @@
|
|||
from asyncio import SelectorEventLoop, get_event_loop, set_event_loop
|
||||
from functools import wraps
|
||||
from operator import attrgetter, itemgetter
|
||||
|
||||
from flask import abort, redirect, render_template, request, url_for
|
||||
|
||||
from . import utils
|
||||
from .config import ADMIN_PASSWORD, ADMIN_USERNAME, APP_PORT, DB_ENABLED, IS_DEBUG, TRANSMISSION_ENABLED, app, auth
|
||||
from .connectors import Nyaa, get_instance, run_all
|
||||
from .connectors.core import ConnectorLang, ConnectorReturn
|
||||
from .forms import DeleteForm, EditForm, FolderDeleteForm, FolderEditForm, SearchForm
|
||||
|
||||
if DB_ENABLED:
|
||||
from .config import db
|
||||
from .models import AnimeFolder, AnimeTitle, AnimeLink
|
||||
|
||||
if TRANSMISSION_ENABLED:
|
||||
from .config import transmission
|
||||
|
||||
|
||||
def db_required(f):
|
||||
@wraps(f)
|
||||
def decorated_function(*args, **kwargs):
|
||||
if not DB_ENABLED:
|
||||
return abort(404)
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return decorated_function
|
||||
|
||||
|
||||
def clean_titles():
|
||||
db.engine.execute("""
|
||||
DELETE
|
||||
FROM anime_title
|
||||
WHERE id IN (
|
||||
SELECT anime_title.id
|
||||
FROM anime_title
|
||||
LEFT JOIN anime_link ON anime_title.id = anime_link.title_id
|
||||
WHERE anime_link.id IS NULL
|
||||
)
|
||||
""")
|
||||
|
||||
|
||||
@auth.verify_password
|
||||
def verify_password(username, password):
|
||||
return username == ADMIN_USERNAME and ADMIN_PASSWORD == password
|
||||
|
||||
|
||||
@app.template_filter('boldify')
|
||||
def boldify(name):
|
||||
query = request.args.get('q', '')
|
||||
name = utils.boldify(name, query)
|
||||
if DB_ENABLED:
|
||||
for keyword in db.session.query(AnimeTitle.keyword.distinct()).all():
|
||||
if keyword[0].lower() != query.lower():
|
||||
name = utils.boldify(name, keyword[0])
|
||||
return name
|
||||
|
||||
|
||||
@app.template_filter('flagify')
|
||||
def flagify(is_vf):
|
||||
return ConnectorLang.FR.value if is_vf else ConnectorLang.JP.value
|
||||
|
||||
|
||||
@app.template_filter('colorify')
|
||||
def colorify(model):
|
||||
return get_instance(model.link, model.title.keyword).color
|
||||
|
||||
|
||||
@app.context_processor
|
||||
def inject_user():
|
||||
return dict(db_disabled=not DB_ENABLED)
|
||||
|
||||
|
||||
@app.route('/')
|
||||
def home():
|
||||
return render_template('layout.html', search_form=SearchForm(), title='Anime torrents search engine')
|
||||
|
||||
|
||||
@app.route('/search')
|
||||
def search():
|
||||
query = request.args.get('q')
|
||||
if not query:
|
||||
return redirect(url_for('home'))
|
||||
|
||||
set_event_loop(SelectorEventLoop())
|
||||
torrents = get_event_loop().run_until_complete(run_all(query))
|
||||
return render_template('search.html', search_form=SearchForm(), connectors=torrents)
|
||||
|
||||
|
||||
@app.route('/latest')
|
||||
@app.route('/latest/<int:page>')
|
||||
def latest(page=1):
|
||||
set_event_loop(SelectorEventLoop())
|
||||
torrents = get_event_loop().run_until_complete(
|
||||
run_all('', return_type=ConnectorReturn.HISTORY, page=page)
|
||||
)
|
||||
|
||||
results = []
|
||||
for torrent in torrents:
|
||||
results = results + torrent.data
|
||||
for result in results:
|
||||
result['self'] = get_instance(result['href'])
|
||||
results.sort(key=itemgetter('date'), reverse=True)
|
||||
|
||||
return render_template('latest.html', search_form=SearchForm(), torrents=results, page=page)
|
||||
|
||||
|
||||
@app.route('/list')
|
||||
@app.route('/list/<url_filters>')
|
||||
@db_required
|
||||
def list_animes(url_filters='nyaa'):
|
||||
filters = None
|
||||
for i, to_filter in enumerate(url_filters.split(',')):
|
||||
if not i:
|
||||
filters = AnimeLink.link.contains(to_filter)
|
||||
else:
|
||||
filters = filters | AnimeLink.link.contains(to_filter)
|
||||
|
||||
titles = db.session.query(AnimeTitle, AnimeLink).join(
|
||||
AnimeLink).filter(filters).order_by(AnimeTitle.name).all()
|
||||
|
||||
results = {}
|
||||
for title, link in titles:
|
||||
if title.id not in results:
|
||||
results[title.id] = [link]
|
||||
else:
|
||||
results[title.id].append(link)
|
||||
|
||||
return render_template('list.html', search_form=SearchForm(), titles=results)
|
||||
|
||||
|
||||
@app.route('/admin', methods=['GET', 'POST'])
|
||||
@db_required
|
||||
@auth.login_required
|
||||
def admin():
|
||||
form = DeleteForm(request.form)
|
||||
|
||||
if form.validate_on_submit():
|
||||
link = AnimeLink.query.filter_by(id=int(form.id.data)).first()
|
||||
if link:
|
||||
form.message = '%s (%s) has been successfully deleted' % (
|
||||
link.title.name,
|
||||
link.season
|
||||
)
|
||||
db.session.delete(link)
|
||||
db.session.commit()
|
||||
|
||||
title = link.title
|
||||
if title and not len(title.links):
|
||||
db.session.delete(title)
|
||||
db.session.commit()
|
||||
else:
|
||||
form._errors = {
|
||||
'id': ['Id %s was not found in the database' % form.id.data]
|
||||
}
|
||||
|
||||
folders = AnimeFolder.query.all()
|
||||
for folder in folders:
|
||||
for title in folder.titles:
|
||||
title.links.sort(key=attrgetter('season'))
|
||||
folder.titles.sort(key=attrgetter('name'))
|
||||
|
||||
return render_template('admin/list.html', search_form=SearchForm(), folders=folders, action_form=form)
|
||||
|
||||
|
||||
@app.route('/admin/folder', methods=['GET', 'POST'])
|
||||
@db_required
|
||||
@auth.login_required
|
||||
def folder_list():
|
||||
form = FolderDeleteForm(request.form)
|
||||
|
||||
if form.validate_on_submit():
|
||||
folder = AnimeFolder.query.filter_by(id=int(form.id.data)).first()
|
||||
if folder:
|
||||
form.message = '%s has been successfully deleted' % folder.name
|
||||
db.session.delete(folder)
|
||||
db.session.commit()
|
||||
else:
|
||||
form._errors = {
|
||||
'id': ['Id %s was not found in the database' % form.id.data]
|
||||
}
|
||||
|
||||
folders = AnimeFolder.query.all()
|
||||
|
||||
return render_template('admin/folder/list.html', search_form=SearchForm(), folders=folders, action_form=form)
|
||||
|
||||
|
||||
@app.route('/admin/folder/edit', methods=['GET', 'POST'])
|
||||
@app.route('/admin/folder/edit/<int:folder_id>', methods=['GET', 'POST'])
|
||||
@db_required
|
||||
@auth.login_required
|
||||
def folder_edit(folder_id=None):
|
||||
folder = AnimeFolder.query.filter_by(id=folder_id).first()
|
||||
folder = folder if folder else AnimeFolder()
|
||||
form = FolderEditForm(
|
||||
request.form,
|
||||
id=folder.id,
|
||||
name=folder.name,
|
||||
path=folder.path
|
||||
)
|
||||
|
||||
if form.validate_on_submit():
|
||||
# Folder
|
||||
folder.name = form.name.data
|
||||
folder.path = form.path.data
|
||||
db.session.add(folder)
|
||||
db.session.commit()
|
||||
return redirect(url_for('folder_list'))
|
||||
|
||||
return render_template('admin/folder/edit.html', search_form=SearchForm(), action_form=form)
|
||||
|
||||
|
||||
@app.route('/admin/edit', methods=['GET', 'POST'])
|
||||
@app.route('/admin/edit/<int:link_id>', methods=['GET', 'POST'])
|
||||
@db_required
|
||||
@auth.login_required
|
||||
def admin_edit(link_id=None):
|
||||
link = AnimeLink.query.filter_by(id=link_id).first()
|
||||
link = link if link else AnimeLink()
|
||||
|
||||
folders = AnimeFolder.query.all()
|
||||
form = EditForm(
|
||||
request.form,
|
||||
id=link.id,
|
||||
folder=link.title.folder.id if link.title else None,
|
||||
name=link.title.name if link.title else None,
|
||||
link=link.link,
|
||||
season=link.season,
|
||||
comment=link.comment,
|
||||
keyword=link.title.keyword if link.title else None
|
||||
)
|
||||
form.folder.choices = [('', '')] + [(g.id, g.name) for g in folders]
|
||||
|
||||
if form.validate_on_submit():
|
||||
# Instance for VF tag
|
||||
instance = get_instance(form.link.data)
|
||||
|
||||
# Title
|
||||
title = AnimeTitle.query.filter_by(id=link.title_id).first()
|
||||
title = title if title else AnimeTitle.query.filter_by(
|
||||
name=form.name.data
|
||||
).first()
|
||||
title = title if title else AnimeTitle()
|
||||
title.folder_id = form.folder.data
|
||||
title.name = form.name.data
|
||||
title.keyword = form.keyword.data.lower()
|
||||
db.session.add(title)
|
||||
db.session.commit()
|
||||
|
||||
# Link
|
||||
link.title_id = title.id
|
||||
link.link = form.link.data
|
||||
link.season = form.season.data
|
||||
link.comment = form.comment.data
|
||||
link.vf = instance.is_vf(form.link.data)
|
||||
|
||||
# Database
|
||||
db.session.add(link)
|
||||
db.session.commit()
|
||||
clean_titles()
|
||||
|
||||
# Transmission
|
||||
if TRANSMISSION_ENABLED and isinstance(instance, Nyaa):
|
||||
if title.folder.path is not None and title.folder.path != '':
|
||||
download_url = link.link.replace(
|
||||
'/view/',
|
||||
'/download/'
|
||||
) + '.torrent'
|
||||
torrent_path = '%s/%s' % (title.folder.path, title.name)
|
||||
torrent = transmission.add_torrent(
|
||||
download_url,
|
||||
download_dir=torrent_path
|
||||
)
|
||||
transmission.move_torrent_data(torrent.id, torrent_path)
|
||||
transmission.start_torrent(torrent.id)
|
||||
|
||||
return redirect(url_for('admin'))
|
||||
|
||||
return render_template('admin/edit.html', search_form=SearchForm(), folders=folders, action_form=form)
|
||||
|
||||
|
||||
def run():
|
||||
app.run('0.0.0.0', APP_PORT, IS_DEBUG)
|
|
@ -1,62 +0,0 @@
|
|||
import logging
|
||||
from os import environ, urandom
|
||||
|
||||
from flask import Flask
|
||||
from flask.cli import load_dotenv
|
||||
from flask_httpauth import HTTPBasicAuth
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from redis import Redis
|
||||
from transmission_rpc.client import Client
|
||||
|
||||
load_dotenv()
|
||||
|
||||
IS_DEBUG = environ.get('FLASK_ENV', 'production') == 'development'
|
||||
ADMIN_USERNAME = environ.get('ADMIN_USERNAME', 'admin')
|
||||
ADMIN_PASSWORD = environ.get('ADMIN_PASSWORD', 'secret')
|
||||
APP_PORT = int(environ.get('FLASK_PORT', 5000))
|
||||
CACHE_TIMEOUT = int(environ.get('CACHE_TIMEOUT', 60 * 60))
|
||||
REQUESTS_TIMEOUT = int(environ.get('REQUESTS_TIMEOUT', 5))
|
||||
BLACKLIST_WORDS = environ.get('BLACKLIST_WORDS', '').split(',') if environ.get('BLACKLIST_WORDS', '') else []
|
||||
DB_ENABLED = False
|
||||
REDIS_ENABLED = False
|
||||
TRANSMISSION_ENABLED = False
|
||||
|
||||
app = Flask(__name__)
|
||||
app.name = 'PyNyaaTa'
|
||||
app.debug = IS_DEBUG
|
||||
app.secret_key = urandom(24).hex()
|
||||
app.url_map.strict_slashes = False
|
||||
auth = HTTPBasicAuth()
|
||||
logging.basicConfig(level=(logging.DEBUG if IS_DEBUG else logging.INFO))
|
||||
logger = logging.getLogger(app.name)
|
||||
|
||||
db_uri = environ.get('DATABASE_URI')
|
||||
if db_uri:
|
||||
DB_ENABLED = True
|
||||
app.config['SQLALCHEMY_DATABASE_URI'] = db_uri
|
||||
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
|
||||
app.config['SQLALCHEMY_ECHO'] = IS_DEBUG
|
||||
app.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
|
||||
'pool_recycle': 200
|
||||
}
|
||||
db = SQLAlchemy(app)
|
||||
from .models import create_all
|
||||
|
||||
create_all()
|
||||
|
||||
cache_host = environ.get('REDIS_SERVER')
|
||||
if cache_host:
|
||||
REDIS_ENABLED = True
|
||||
cache = Redis(cache_host)
|
||||
|
||||
transmission_host = environ.get('TRANSMISSION_SERVER')
|
||||
if transmission_host:
|
||||
TRANSMISSION_ENABLED = True
|
||||
transmission_username = environ.get('TRANSMISSION_RPC_USERNAME')
|
||||
transmission_password = environ.get('TRANSMISSION_RPC_PASSWORD')
|
||||
transmission = Client(
|
||||
username=transmission_username,
|
||||
password=transmission_password,
|
||||
host=transmission_host,
|
||||
logger=logger
|
||||
)
|
|
@ -1,21 +0,0 @@
|
|||
from asyncio import gather
|
||||
|
||||
from .animeultime import AnimeUltime
|
||||
from .core import Other
|
||||
from .nyaa import Nyaa
|
||||
|
||||
|
||||
async def run_all(*args, **kwargs):
|
||||
coroutines = [Nyaa(*args, **kwargs).run(),
|
||||
AnimeUltime(*args, **kwargs).run()]
|
||||
|
||||
return list(await gather(*coroutines))
|
||||
|
||||
|
||||
def get_instance(url, query=''):
|
||||
if 'nyaa.si' in url:
|
||||
return Nyaa(query)
|
||||
elif 'anime-ultime' in url:
|
||||
return AnimeUltime(query)
|
||||
else:
|
||||
return Other(query)
|
|
@ -1,113 +0,0 @@
|
|||
from datetime import datetime, timedelta
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from .core import ConnectorCache, ConnectorCore, ConnectorReturn, curl_content
|
||||
from ..utils import link_exist_in_db, parse_date
|
||||
|
||||
|
||||
class AnimeUltime(ConnectorCore):
|
||||
color = 'is-warning'
|
||||
title = 'Anime-Ultime'
|
||||
favicon = 'animeultime.png'
|
||||
base_url = 'http://www.anime-ultime.net'
|
||||
is_light = True
|
||||
|
||||
def get_full_search_url(self):
|
||||
from_date = ''
|
||||
sort_type = 'search'
|
||||
|
||||
if self.return_type is ConnectorReturn.HISTORY:
|
||||
try:
|
||||
page_date = datetime.now() - timedelta((int(self.page) - 1) * 365 / 12)
|
||||
except OverflowError:
|
||||
page_date = datetime.fromtimestamp(0)
|
||||
from_date = page_date.strftime('%m%Y')
|
||||
sort_type = 'history'
|
||||
|
||||
return '%s/%s-0-1/%s' % (self.base_url, sort_type, from_date)
|
||||
|
||||
@ConnectorCache.cache_data
|
||||
def search(self):
|
||||
response = curl_content(self.get_full_search_url(), {
|
||||
'search': self.query
|
||||
})
|
||||
|
||||
if response['http_code'] == 200:
|
||||
html = BeautifulSoup(response['output'], 'html.parser')
|
||||
title = html.select('div.title')
|
||||
player = html.select('div.AUVideoPlayer')
|
||||
|
||||
if len(title) > 0 and 'Recherche' in title[0].get_text():
|
||||
trs = html.select('table.jtable tr')
|
||||
|
||||
for i, tr in enumerate(trs):
|
||||
if not i:
|
||||
continue
|
||||
|
||||
tds = tr.findAll('td')
|
||||
|
||||
if len(tds) < 2:
|
||||
continue
|
||||
|
||||
url = tds[0].a
|
||||
href = '%s/%s' % (self.base_url, url['href'])
|
||||
|
||||
if not any(href == d['href'] for d in self.data):
|
||||
self.data.append({
|
||||
'vf': self.is_vf(),
|
||||
'href': href,
|
||||
'name': url.get_text(),
|
||||
'type': tds[1].get_text(),
|
||||
'class': self.color if link_exist_in_db(href) else ''
|
||||
})
|
||||
elif len(player) > 0:
|
||||
name = html.select('h1')
|
||||
ani_type = html.select('div.titre')
|
||||
href = '%s/file-0-1/%s' % (
|
||||
self.base_url,
|
||||
player[0]['data-serie']
|
||||
)
|
||||
|
||||
self.data.append({
|
||||
'vf': self.is_vf(),
|
||||
'href': href,
|
||||
'name': name[0].get_text(),
|
||||
'type': ani_type[0].get_text().replace(':', ''),
|
||||
'class': self.color if link_exist_in_db(href) else ''
|
||||
})
|
||||
|
||||
self.on_error = False
|
||||
|
||||
@ConnectorCache.cache_data
|
||||
def get_history(self):
|
||||
response = curl_content(self.get_full_search_url())
|
||||
|
||||
if response['http_code'] == 200:
|
||||
html = BeautifulSoup(response['output'], 'html.parser')
|
||||
tables = html.select('table.jtable')
|
||||
h3s = html.findAll('h3')
|
||||
|
||||
for i, table in enumerate(tables):
|
||||
for j, tr in enumerate(table.findAll('tr')):
|
||||
if not j:
|
||||
continue
|
||||
|
||||
tds = tr.findAll('td')
|
||||
link = tds[0].a
|
||||
href = '%s/%s' % (self.base_url, link['href'])
|
||||
|
||||
self.data.append({
|
||||
'vf': self.is_vf(),
|
||||
'href': href,
|
||||
'name': link.get_text(),
|
||||
'type': tds[4].get_text(),
|
||||
'date': parse_date(h3s[i].string[:-3], '%A %d %B %Y'),
|
||||
'class': self.color if link_exist_in_db(href) else ''
|
||||
})
|
||||
|
||||
self.on_error = False
|
||||
|
||||
@ConnectorCache.cache_data
|
||||
def is_vf(self, url=''):
|
||||
return False
|
|
@ -1,181 +0,0 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from enum import Enum
|
||||
from functools import wraps
|
||||
from json import dumps, loads
|
||||
|
||||
from redis.exceptions import RedisError
|
||||
from requests import RequestException, Session
|
||||
|
||||
from ..config import CACHE_TIMEOUT, REDIS_ENABLED, REQUESTS_TIMEOUT, logger
|
||||
|
||||
if REDIS_ENABLED:
|
||||
from ..config import cache
|
||||
|
||||
|
||||
class ConnectorReturn(Enum):
|
||||
SEARCH = 1
|
||||
HISTORY = 2
|
||||
|
||||
|
||||
class ConnectorLang(Enum):
|
||||
FR = '🇫🇷'
|
||||
JP = '🇯🇵'
|
||||
|
||||
|
||||
class Cache:
|
||||
def cache_data(self, f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwds):
|
||||
connector = args[0]
|
||||
key = 'pynyaata.%s.%s.%s.%s' % (
|
||||
connector.__class__.__name__,
|
||||
f.__name__,
|
||||
connector.query,
|
||||
connector.page
|
||||
)
|
||||
|
||||
if REDIS_ENABLED:
|
||||
json = None
|
||||
|
||||
try:
|
||||
json = cache.get(key)
|
||||
except RedisError:
|
||||
pass
|
||||
|
||||
if json:
|
||||
data = loads(json)
|
||||
connector.data = data['data']
|
||||
connector.is_more = data['is_more']
|
||||
connector.on_error = False
|
||||
return
|
||||
|
||||
ret = f(*args, **kwds)
|
||||
|
||||
if not connector.on_error and REDIS_ENABLED:
|
||||
try:
|
||||
cache.set(key, dumps({
|
||||
'data': connector.data,
|
||||
'is_more': connector.is_more
|
||||
}), CACHE_TIMEOUT)
|
||||
except RedisError:
|
||||
pass
|
||||
|
||||
return ret
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
ConnectorCache = Cache()
|
||||
|
||||
|
||||
def curl_content(url, params=None, ajax=False, debug=True):
|
||||
output = ''
|
||||
http_code = 500
|
||||
method = 'post' if (params is not None) else 'get'
|
||||
request = Session()
|
||||
headers = {}
|
||||
|
||||
if ajax:
|
||||
headers['X-Requested-With'] = 'XMLHttpRequest'
|
||||
|
||||
try:
|
||||
if method == 'post':
|
||||
response = request.post(
|
||||
url,
|
||||
params,
|
||||
timeout=REQUESTS_TIMEOUT,
|
||||
headers=headers
|
||||
)
|
||||
else:
|
||||
response = request.get(
|
||||
url,
|
||||
timeout=REQUESTS_TIMEOUT,
|
||||
headers=headers
|
||||
)
|
||||
|
||||
output = response.text
|
||||
http_code = response.status_code
|
||||
except RequestException as e:
|
||||
if debug:
|
||||
logger.exception(e)
|
||||
|
||||
return {'http_code': http_code, 'output': output}
|
||||
|
||||
|
||||
class ConnectorCore(ABC):
|
||||
@property
|
||||
@abstractmethod
|
||||
def color(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def title(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def favicon(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def base_url(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def is_light(self):
|
||||
pass
|
||||
|
||||
def __init__(self, query, page=1, return_type=ConnectorReturn.SEARCH):
|
||||
self.query = query
|
||||
self.data = []
|
||||
self.is_more = False
|
||||
self.on_error = True
|
||||
self.page = page
|
||||
self.return_type = return_type
|
||||
|
||||
@abstractmethod
|
||||
def get_full_search_url(self):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def search(self):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_history(self):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def is_vf(self, url):
|
||||
pass
|
||||
|
||||
async def run(self):
|
||||
if self.on_error:
|
||||
if self.return_type is ConnectorReturn.SEARCH:
|
||||
self.search()
|
||||
elif self.return_type is ConnectorReturn.HISTORY:
|
||||
self.get_history()
|
||||
return self
|
||||
|
||||
|
||||
class Other(ConnectorCore):
|
||||
color = 'is-danger'
|
||||
title = 'Other'
|
||||
favicon = 'blank.png'
|
||||
base_url = ''
|
||||
is_light = True
|
||||
|
||||
def get_full_search_url(self):
|
||||
pass
|
||||
|
||||
def search(self):
|
||||
pass
|
||||
|
||||
def get_history(self):
|
||||
pass
|
||||
|
||||
def is_vf(self, url):
|
||||
return False
|
|
@ -1,91 +0,0 @@
|
|||
from bs4 import BeautifulSoup
|
||||
|
||||
from .core import ConnectorCache, ConnectorCore, ConnectorReturn, curl_content
|
||||
from ..utils import check_blacklist_words, check_if_vf, link_exist_in_db
|
||||
|
||||
|
||||
class Nyaa(ConnectorCore):
|
||||
color = 'is-link'
|
||||
title = 'Nyaa'
|
||||
favicon = 'nyaa.png'
|
||||
base_url = 'https://nyaa.si'
|
||||
is_light = False
|
||||
|
||||
def get_full_search_url(self):
|
||||
sort_type = 'size'
|
||||
if self.return_type is ConnectorReturn.HISTORY:
|
||||
sort_type = 'id'
|
||||
|
||||
to_query = '(%s vf)|(%s vostfr)|(%s multi)|(%s french)' % (
|
||||
self.query,
|
||||
self.query,
|
||||
self.query,
|
||||
self.query
|
||||
)
|
||||
return '%s/?f=0&c=1_3&s=%s&o=desc&q=%s&p=%s' % (self.base_url, sort_type, to_query, self.page)
|
||||
|
||||
def get_history(self):
|
||||
self.search()
|
||||
|
||||
@ConnectorCache.cache_data
|
||||
def search(self):
|
||||
response = curl_content(self.get_full_search_url())
|
||||
|
||||
if response['http_code'] == 200:
|
||||
html = BeautifulSoup(response['output'], 'html.parser')
|
||||
trs = html.select('table.torrent-list tr')
|
||||
valid_trs = 0
|
||||
|
||||
for i, tr in enumerate(trs):
|
||||
if not i:
|
||||
continue
|
||||
|
||||
tds = tr.findAll('td')
|
||||
check_downloads = int(tds[7].get_text())
|
||||
check_seeds = int(tds[5].get_text())
|
||||
|
||||
if check_downloads or check_seeds:
|
||||
urls = tds[1].findAll('a')
|
||||
|
||||
if len(urls) > 1:
|
||||
url = urls[1]
|
||||
has_comment = True
|
||||
else:
|
||||
url = urls[0]
|
||||
has_comment = False
|
||||
|
||||
url_safe = url.get_text()
|
||||
|
||||
if check_blacklist_words(url_safe):
|
||||
continue
|
||||
|
||||
valid_trs = valid_trs + 1
|
||||
href = self.base_url + url['href']
|
||||
|
||||
self.data.append({
|
||||
'vf': check_if_vf(url_safe),
|
||||
'href': href,
|
||||
'name': url_safe,
|
||||
'comment': str(urls[0]).replace('/view/', self.base_url + '/view/') if has_comment else '',
|
||||
'link': tds[2].decode_contents().replace('/download/', self.base_url + '/download/'),
|
||||
'size': tds[3].get_text(),
|
||||
'date': tds[4].get_text(),
|
||||
'seeds': check_seeds,
|
||||
'leechs': tds[6].get_text(),
|
||||
'downloads': check_downloads,
|
||||
'class': self.color if link_exist_in_db(href) else 'is-%s' % tr['class'][0]
|
||||
})
|
||||
|
||||
self.on_error = False
|
||||
self.is_more = valid_trs and valid_trs != len(trs) - 1
|
||||
|
||||
@ConnectorCache.cache_data
|
||||
def is_vf(self, url):
|
||||
response = curl_content(url)
|
||||
|
||||
if response['http_code'] == 200:
|
||||
html = BeautifulSoup(response['output'], 'html.parser')
|
||||
title = html.select('h3.panel-title')
|
||||
return check_if_vf(title[0].get_text())
|
||||
|
||||
return False
|
|
@ -1,38 +0,0 @@
|
|||
from flask_wtf import FlaskForm
|
||||
from wtforms import HiddenField, SelectField, StringField
|
||||
from wtforms.fields.html5 import SearchField, URLField
|
||||
from wtforms.validators import DataRequired
|
||||
|
||||
|
||||
class SearchForm(FlaskForm):
|
||||
q = SearchField('search', validators=[DataRequired()])
|
||||
|
||||
|
||||
class DeleteForm(FlaskForm):
|
||||
class Meta:
|
||||
csrf = False
|
||||
|
||||
id = HiddenField('id', validators=[DataRequired()])
|
||||
|
||||
|
||||
class EditForm(FlaskForm):
|
||||
id = HiddenField('id')
|
||||
folder = SelectField('folder', validators=[DataRequired()])
|
||||
name = StringField('name', validators=[DataRequired()])
|
||||
link = URLField('link', validators=[DataRequired()])
|
||||
season = StringField('season', validators=[DataRequired()])
|
||||
comment = StringField('comment')
|
||||
keyword = StringField('keyword', validators=[DataRequired()])
|
||||
|
||||
|
||||
class FolderEditForm(FlaskForm):
|
||||
id = HiddenField('id')
|
||||
name = StringField('name', validators=[DataRequired()])
|
||||
path = StringField('path')
|
||||
|
||||
|
||||
class FolderDeleteForm(FlaskForm):
|
||||
class Meta:
|
||||
csrf = False
|
||||
|
||||
id = HiddenField('id', validators=[DataRequired()])
|
|
@ -1,37 +0,0 @@
|
|||
from .config import db
|
||||
|
||||
|
||||
class AnimeFolder(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
name = db.Column(db.String(length=100), unique=True, nullable=False)
|
||||
path = db.Column(db.String(length=100))
|
||||
titles = db.relationship(
|
||||
"AnimeTitle",
|
||||
backref="folder",
|
||||
cascade='all,delete-orphan'
|
||||
)
|
||||
|
||||
|
||||
class AnimeTitle(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
name = db.Column(db.String(length=100), unique=True, nullable=False)
|
||||
keyword = db.Column(db.Text(), nullable=False)
|
||||
folder_id = db.Column(db.Integer, db.ForeignKey('anime_folder.id'))
|
||||
links = db.relationship(
|
||||
'AnimeLink',
|
||||
backref="title",
|
||||
cascade='all,delete-orphan'
|
||||
)
|
||||
|
||||
|
||||
class AnimeLink(db.Model):
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
link = db.Column(db.Text(), nullable=False)
|
||||
season = db.Column(db.Text(), nullable=False)
|
||||
comment = db.Column(db.Text())
|
||||
vf = db.Column(db.Boolean, nullable=False)
|
||||
title_id = db.Column(db.Integer, db.ForeignKey('anime_title.id'))
|
||||
|
||||
|
||||
def create_all():
|
||||
db.create_all()
|
|
@ -1,130 +0,0 @@
|
|||
html {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
position: relative;
|
||||
min-height: 100%;
|
||||
padding-bottom: 1rem;
|
||||
}
|
||||
|
||||
footer {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
text-align: center;
|
||||
font-size: small;
|
||||
margin: 0.5rem;
|
||||
color: #7a7a7a;
|
||||
}
|
||||
|
||||
section {
|
||||
overflow-x: auto;
|
||||
padding-top: 2rem !important;
|
||||
}
|
||||
|
||||
nav, nav > div {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
a.navbar-item, a.navbar-item:hover {
|
||||
color: whitesmoke;
|
||||
}
|
||||
|
||||
a.navbar-item:hover {
|
||||
background-color: #292929;
|
||||
}
|
||||
|
||||
div.navbar-end {
|
||||
flex-basis: min-content;
|
||||
}
|
||||
|
||||
th.error {
|
||||
color: red;
|
||||
}
|
||||
|
||||
img.favicon {
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
position: relative;
|
||||
top: 2px;
|
||||
}
|
||||
|
||||
button.fa-button {
|
||||
padding: 0;
|
||||
cursor: pointer;
|
||||
border: none;
|
||||
}
|
||||
|
||||
label.checkbox {
|
||||
margin: 1.2rem 1.2rem 0;
|
||||
}
|
||||
|
||||
.hidden {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.table td:last-child {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.table td:last-child form {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.table td.is-primary, .table tr.is-primary {
|
||||
background-color: rgba(0, 209, 178, 0.2) !important;
|
||||
border-color: rgba(0, 209, 178, 0.1);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.table td.is-link, .table tr.is-link {
|
||||
background-color: rgba(50, 115, 220, 0.2) !important;
|
||||
border-color: rgba(50, 115, 220, 0.1);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.table td.is-info, .table tr.is-info {
|
||||
background-color: rgba(32, 156, 238, 0.2) !important;
|
||||
border-color: rgba(32, 156, 238, 0.1);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.table td.is-success, .table tr.is-success {
|
||||
background-color: rgba(35, 209, 96, 0.2) !important;
|
||||
border-color: rgba(35, 209, 96, 0.1);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.table td.is-warning, .table tr.is-warning {
|
||||
background-color: rgba(255, 221, 87, 0.2) !important;
|
||||
border-color: rgba(255, 221, 87, 0.1);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.table td.is-danger, .table tr.is-danger {
|
||||
background-color: rgba(255, 56, 96, 0.2) !important;
|
||||
border-color: rgba(255, 56, 96, 0.1);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.quick-scroll {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.table.is-hoverable tbody tr:not(.is-selected):hover {
|
||||
background-color: #f1f1f1 !important;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
.table.is-hoverable tbody tr:not(.is-selected):hover {
|
||||
background-color: #333 !important;
|
||||
}
|
||||
|
||||
.select > select {
|
||||
border-color: #363636 !important;
|
||||
background-color: #0a0a0a !important;
|
||||
color: #dbdbdb !important;
|
||||
}
|
||||
}
|
Before Width: | Height: | Size: 2.1 KiB |
Before Width: | Height: | Size: 119 B |
Before Width: | Height: | Size: 134 KiB |
Before Width: | Height: | Size: 2.0 KiB |
Before Width: | Height: | Size: 3.0 KiB |
Before Width: | Height: | Size: 3.0 KiB |
|
@ -1,99 +0,0 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block title %}- Admin Edit {{ action_form.name.data }}{% endblock %}
|
||||
{% block body %}
|
||||
<form method="post">
|
||||
{{ action_form.csrf_token }}
|
||||
|
||||
<div class="field is-horizontal">
|
||||
<div class="field-body">
|
||||
<div class="field column">
|
||||
<div class="control is-expanded">
|
||||
<div class="select is-fullwidth">
|
||||
{{ action_form.folder }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field column is-6">
|
||||
<div class="control is-expanded">
|
||||
<div class="select is-fullwidth">
|
||||
{{ action_form.name(list='animes', class='input', placeholder='Name') }}
|
||||
<datalist id="animes">
|
||||
{% for folder in folders %}
|
||||
{% for title in folder.titles %}
|
||||
<option {{ 'selected' if title.name == action_form.name.data }}
|
||||
data-folder="{{ title.folder.id }}" value="{{ title.name }}"
|
||||
data-keyword="{{ title.keyword }}">
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</datalist>
|
||||
|
||||
<script>
|
||||
document.getElementById('name').oninput = function (choice) {
|
||||
document.getElementById('animes').childNodes.forEach(function (option) {
|
||||
if (option.value === choice.target.value) {
|
||||
document.getElementById('folder').value = option.dataset.folder;
|
||||
document.getElementById('keyword').value = option.dataset.keyword;
|
||||
}
|
||||
});
|
||||
};
|
||||
</script>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field is-horizontal">
|
||||
<div class="field-body">
|
||||
<div class="field column is-6">
|
||||
<div class="control is-expanded">
|
||||
{{ action_form.link(class='input', placeholder='Link') }}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field column">
|
||||
<div class="control is-expanded">
|
||||
{{ action_form.season(class='input', placeholder='Season') }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field is-horizontal">
|
||||
<div class="field-body">
|
||||
<div class="field column is-6">
|
||||
<div class="control is-expanded">
|
||||
{{ action_form.comment(class='input', placeholder='Comment') }}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field column">
|
||||
<div class="control is-expanded">
|
||||
<div class="select is-fullwidth">
|
||||
{{ action_form.keyword(list='keywords', class='input', placeholder='Keyword') }}
|
||||
<datalist id="keywords">
|
||||
{% for folder in folders %}
|
||||
{% for title in folder.titles %}
|
||||
<option {{ 'selected' if title.keyword == action_form.keyword.data }}
|
||||
value="{{ title.keyword }}">
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</datalist>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field is-horizontal">
|
||||
<div class="field-body">
|
||||
<div class="field column">
|
||||
<div class="control is-expanded">
|
||||
<input class="button is-info" type="submit">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
{% endblock %}
|
|
@ -1,29 +0,0 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block title %}- Folder Edit {{ action_form.name.data }}{% endblock %}
|
||||
{% block body %}
|
||||
<form method="post">
|
||||
{{ action_form.csrf_token }}
|
||||
|
||||
<div class="field is-horizontal">
|
||||
<div class="field-body">
|
||||
<div class="field column is-5">
|
||||
<div class="control is-expanded">
|
||||
{{ action_form.name(class='input', placeholder='Name') }}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field column is-5">
|
||||
<div class="control is-expanded">
|
||||
{{ action_form.path(class='input', placeholder='Path') }}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="field column">
|
||||
<div class="control is-expanded">
|
||||
<input class="button is-info" type="submit">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
{% endblock %}
|
|
@ -1,47 +0,0 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block title %}- Folder List{% endblock %}
|
||||
{% block add_button %}
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="Back"
|
||||
href="{{ url_for('admin') }}">
|
||||
<i class="fa fa-arrow-left"></i><i> </i>
|
||||
<span class="is-hidden-mobile">Back</span>
|
||||
</a>
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="Add folder"
|
||||
href="{{ url_for('folder_edit') }}">
|
||||
<i class="fa fa-plus"></i><i> </i>
|
||||
<span class="is-hidden-mobile">Add folder</span>
|
||||
</a>
|
||||
{% endblock %}
|
||||
{% block body %}
|
||||
<table class="table is-bordered is-striped is-narrow is-fullwidth is-hoverable is-size-7">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Path</th>
|
||||
<th>Tools</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{% for folder in folders %}
|
||||
<tr>
|
||||
<td>{{ folder.name }}</td>
|
||||
<td>{{ folder.path }}</td>
|
||||
<td>
|
||||
<a href="{{ url_for('folder_edit', folder_id=folder.id) }}">
|
||||
<i class="fa fa-pencil"></i>
|
||||
</a>
|
||||
<i> </i>
|
||||
<form method="post">
|
||||
{{ action_form.id(value=folder.id) }}
|
||||
<button class="fa fa-trash fa-button"
|
||||
onclick="return confirm('Are you sure you want to delete this item ?')">
|
||||
</button>
|
||||
</form>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
{% endblock %}
|
|
@ -1,85 +0,0 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block title %}- Admin List{% endblock %}
|
||||
{% block add_button %}
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="Add entry" href="{{ url_for('admin_edit') }}">
|
||||
<i class="fa fa-plus"></i><i> </i>
|
||||
<span class="is-hidden-mobile">Add entry</span>
|
||||
</a>
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="Manage folders" href="{{ url_for('folder_list') }}">
|
||||
<i class="fa fa-folder-open"></i><i> </i>
|
||||
<span class="is-hidden-mobile">Manage folders</span>
|
||||
</a>
|
||||
{% endblock %}
|
||||
{% block body %}
|
||||
<div class="level-right quick-scroll">
|
||||
<span class="level-item">Quick Scroll :</span>
|
||||
{% for folder in folders %}
|
||||
{% if loop.index0 %}
|
||||
<a class="level-item" href="#{{ folder.name }}">{{ folder.name }}</a>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
<table class="table is-bordered is-striped is-narrow is-fullwidth is-hoverable is-size-7">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Link</th>
|
||||
<th>Season</th>
|
||||
<th>Comment</th>
|
||||
<th>Tools</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{% for folder in folders %}
|
||||
{% if folder.titles|length > 0 %}
|
||||
<th colspan="5" id="{{ folder.name }}">{{ folder.name }}</th>
|
||||
|
||||
{% for title in folder.titles %}
|
||||
{% for link in title.links %}
|
||||
<tr>
|
||||
{% if not loop.index0 %}
|
||||
<td rowspan="{{ title.links|length }}">
|
||||
{{ title.name }}
|
||||
</td>
|
||||
{% endif %}
|
||||
|
||||
<td class="{{ link|colorify }}">
|
||||
{{ link.vf|flagify }}
|
||||
{{ link.link|urlize(30, target='_blank') }}
|
||||
</td>
|
||||
|
||||
<td>
|
||||
{{ link.season }}
|
||||
</td>
|
||||
|
||||
<td>
|
||||
{{ link.comment|urlize(target='_blank') }}
|
||||
</td>
|
||||
|
||||
<td>
|
||||
<a href="{{ url_for('search', q=link.title.keyword) }}" target="_blank">
|
||||
<i class="fa fa-search"></i>
|
||||
</a>
|
||||
<i> </i>
|
||||
<a href="{{ url_for('admin_edit', link_id=link.id) }}">
|
||||
<i class="fa fa-pencil"></i>
|
||||
</a>
|
||||
<i> </i>
|
||||
<form method="post">
|
||||
{{ action_form.id(value=link.id) }}
|
||||
<button class="fa fa-trash fa-button"
|
||||
onclick="return confirm('Are you sure you want to delete this item ?')">
|
||||
</button>
|
||||
</form>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
{% endblock %}
|
|
@ -1,82 +0,0 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block title %}- Latest torrents{% endblock %}
|
||||
{% block body %}
|
||||
<table class="table is-bordered is-striped is-narrow is-fullwidth is-hoverable is-size-7">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Link</th>
|
||||
<th>Size</th>
|
||||
<th>Date</th>
|
||||
<th>
|
||||
<i class="fa fa-arrow-up"></i>
|
||||
</th>
|
||||
<th>
|
||||
<i class="fa fa-arrow-down"></i>
|
||||
</th>
|
||||
<th>
|
||||
<i class="fa fa fa-check"></i>
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{% for torrent in torrents %}
|
||||
<tr class="{{ torrent.class }}">
|
||||
<td colspan="{{ '3' if torrent.self.is_light else '' }}">
|
||||
<img class="favicon"
|
||||
src="{{ url_for('static', filename='favicons/%s' % torrent.self.favicon) }}" alt="">
|
||||
<i> </i>
|
||||
{{ torrent.vf|flagify }}
|
||||
<a href="{{ torrent.href }}" target="_blank">
|
||||
{{ torrent.name|boldify|safe }}
|
||||
</a>
|
||||
</td>
|
||||
|
||||
{% if torrent.self.is_light %}
|
||||
<td>
|
||||
{{ torrent.date }}
|
||||
</td>
|
||||
<td colspan="3">
|
||||
{{ torrent.type }}
|
||||
</td>
|
||||
{% else %}
|
||||
<td>
|
||||
{{ torrent.link|safe }}
|
||||
{{ torrent.comment|safe }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.size }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.date }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.seeds }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.leechs }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.downloads }}
|
||||
</td>
|
||||
{% endif %}
|
||||
|
||||
</tr>
|
||||
{% endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<nav class="pagination is-right" role="navigation" aria-label="pagination">
|
||||
{% if page > 1 %}
|
||||
<a class="pagination-previous" href="{{ url_for('latest', page=(page - 1)) }}">
|
||||
Previous
|
||||
</a>
|
||||
{% endif %}
|
||||
<a class="pagination-next" href="{{ url_for('latest', page=(page + 1)) }}">
|
||||
Next page
|
||||
</a>
|
||||
<ul class="pagination-list"></ul>
|
||||
</nav>
|
||||
{% endblock %}
|
|
@ -1,98 +0,0 @@
|
|||
<!doctype html>
|
||||
<html lang="fr">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=2.0, minimum-scale=1.0">
|
||||
<meta http-equiv="X-UA-Compatible" content="ie=edge">
|
||||
<meta name="description" content="Xefir's anime search engine (っ^‿^)っ">
|
||||
<title>PyNyaaTa - {% block title %}{% endblock %}</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='favicons/favicon.ico') }}">
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/bulma.min.css') }}">
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/bulma-prefers-dark.min.css') }}">
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/bulma-tooltip.min.css') }}">
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/font-awesome.min.css') }}">
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/styles.css') }}">
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<nav class="navbar is-dark" role="navigation" aria-label="main navigation">
|
||||
<div class="navbar-start">
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="Home" href="{{ url_for('home') }}">
|
||||
π 😼た
|
||||
</a>
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="Latest torrents" href="{{ url_for('latest') }}">
|
||||
<i class="fa fa-newspaper-o"></i><i> </i>
|
||||
<span class="is-hidden-mobile">Latest torrents</span>
|
||||
</a>
|
||||
{% if not db_disabled %}
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="My seeded torrents"
|
||||
href="{{ url_for('list_animes') }}">
|
||||
<i class="fa fa-cloud-download"></i><i> </i>
|
||||
<span class="is-hidden-mobile">My seeded torrents</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
{% block add_button %}{% endblock %}
|
||||
{% if request.args.get('q') %}
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="TVDB"
|
||||
href="https://www.thetvdb.com/search?menu%5Btype%5D=series&query={{ request.args.get('q') }}"
|
||||
target="_blank">
|
||||
<i class="fa fa-television"></i><i> </i>
|
||||
<span class="is-hidden-mobile">TVDB</span>
|
||||
</a>
|
||||
<a class="navbar-item has-tooltip-bottom has-tooltip-hidden-desktop" data-tooltip="Nautiljon"
|
||||
href="https://www.nautiljon.com/search.php?q={{ request.args.get('q') }}" target="_blank">
|
||||
<i class="fa fa-rss"></i><i> </i>
|
||||
<span class="is-hidden-mobile">Nautiljon</span>
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="navbar-end">
|
||||
<form action="{{ url_for('search') }}" class="navbar-item">
|
||||
<div class="field has-addons">
|
||||
<div class="control">
|
||||
{{ search_form.q(placeholder='Search ...', class='input', value=request.args.get('q', '')) }}
|
||||
</div>
|
||||
<div class="control">
|
||||
<button type="submit" class="button is-info">
|
||||
<i class="fa fa-search"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<section class="section" role="main">
|
||||
{% if action_form %}
|
||||
{% if action_form.errors %}
|
||||
<div class="notification is-danger">
|
||||
<button class="delete" onclick="this.parentNode.style.display = 'none'"></button>
|
||||
<ul>
|
||||
{% for field in action_form.errors %}
|
||||
{% for error in action_form.errors[field] %}
|
||||
<li>"{{ field }}" => {{ error }}</li>
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if action_form.message %}
|
||||
<div class="notification is-success">
|
||||
<button class="delete" onclick="this.parentNode.style.display = 'none'"></button>
|
||||
{{ action_form.message }}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
{% block body %}{% endblock %}
|
||||
</section>
|
||||
|
||||
<footer>
|
||||
<b>PyNyaata</b> made by <i>Xéfir Destiny</i>.
|
||||
This software is open source under <a target="_blank" href="http://www.wtfpl.net">WTFPL</a> license !
|
||||
Please look at the <a target="_blank" href="https://git.crystalyx.net/Xefir/PyNyaaTa">source code</a>
|
||||
or <a target="_blank" href="https://hub.docker.com/r/xefir/pynyaata">host it</a> yourself o/
|
||||
</footer>
|
||||
|
||||
</body>
|
||||
</html>
|
|
@ -1,46 +0,0 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block title %}- My seeded torrents{% endblock %}
|
||||
{% block body %}
|
||||
<table class="table is-bordered is-striped is-narrow is-fullwidth is-hoverable is-size-7">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Link</th>
|
||||
<th>Season</th>
|
||||
<th>Tools</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{% for title in titles.values() %}
|
||||
{% for link in title %}
|
||||
<tr>
|
||||
{% if not loop.index0 %}
|
||||
<td rowspan="{{ title|length }}">
|
||||
{{ link.title.name }}
|
||||
</td>
|
||||
{% endif %}
|
||||
|
||||
<td class="{{ link|colorify }}">
|
||||
{{ link.vf|flagify }}
|
||||
{{ link.link|urlize(30, target='_blank') }}
|
||||
</td>
|
||||
|
||||
<td>
|
||||
{{ link.season }}
|
||||
</td>
|
||||
|
||||
{% if not loop.index0 %}
|
||||
<td rowspan="{{ title|length }}">
|
||||
<a href="{{ url_for('search', q=link.title.keyword) }}" target="_blank">
|
||||
<i class="fa fa-search"></i>
|
||||
</a>
|
||||
</td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
{% endblock %}
|
|
@ -1,91 +0,0 @@
|
|||
{% extends "layout.html" %}
|
||||
{% block title %}- Search for "{{ request.args.get('q') }}"{% endblock %}
|
||||
{% block body %}
|
||||
<table class="table is-bordered is-striped is-narrow is-fullwidth is-hoverable">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>
|
||||
<i class="fa fa-comment"></i>
|
||||
</th>
|
||||
<th>Link</th>
|
||||
<th>Size</th>
|
||||
<th>Date</th>
|
||||
<th>
|
||||
<i class="fa fa-arrow-up"></i>
|
||||
</th>
|
||||
<th>
|
||||
<i class="fa fa-arrow-down"></i>
|
||||
</th>
|
||||
<th>
|
||||
<i class="fa fa fa-check"></i>
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{% for connector in connectors %}
|
||||
{% if connector.data|length > 0 or connector.is_more %}
|
||||
<th colspan="8">{{ connector.title }}</th>
|
||||
{% endif %}
|
||||
|
||||
{% for torrent in connector.data %}
|
||||
<tr class="{{ torrent.class }}">
|
||||
<td>
|
||||
{{ torrent.vf|flagify }}
|
||||
<a href="{{ torrent.href }}" target="_blank">
|
||||
{{ torrent.name|boldify|safe }}
|
||||
</a>
|
||||
</td>
|
||||
|
||||
{% if connector.is_light %}
|
||||
<td colspan="7">
|
||||
{{ torrent.type }}
|
||||
</td>
|
||||
{% else %}
|
||||
<td>
|
||||
{{ torrent.comment|safe }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.link|safe }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.size }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.date }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.seeds }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.leechs }}
|
||||
</td>
|
||||
<td>
|
||||
{{ torrent.downloads }}
|
||||
</td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
{% endfor %}
|
||||
|
||||
{% if connector.is_more %}
|
||||
<tr>
|
||||
<th colspan="8">
|
||||
<a href="{{ connector.get_full_search_url() }}" target="_blank">More ...</a>
|
||||
</th>
|
||||
</tr>
|
||||
{% endif %}
|
||||
|
||||
{% if connector.on_error %}
|
||||
<tr class="is-danger">
|
||||
<th colspan="8" class="error">
|
||||
Error, can't grab data from {{ connector.title }}
|
||||
<a href="{{ connector.get_full_search_url() }}" target="_blank">Go to the website -></a>
|
||||
</th>
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
{% endblock %}
|
|
@ -1,42 +0,0 @@
|
|||
import re
|
||||
from datetime import datetime
|
||||
from dateparser import parse
|
||||
|
||||
from .config import BLACKLIST_WORDS, DB_ENABLED
|
||||
|
||||
|
||||
def link_exist_in_db(href):
|
||||
if DB_ENABLED:
|
||||
from .models import AnimeLink
|
||||
return AnimeLink.query.filter_by(link=href).first()
|
||||
return False
|
||||
|
||||
|
||||
def parse_date(str_to_parse, date_format=''):
|
||||
if str_to_parse is None:
|
||||
date_to_format = datetime.fromtimestamp(0)
|
||||
elif isinstance(str_to_parse, datetime):
|
||||
date_to_format = str_to_parse
|
||||
else:
|
||||
date = parse(str_to_parse, date_formats=[date_format])
|
||||
if date:
|
||||
date_to_format = date
|
||||
else:
|
||||
date_to_format = datetime.fromtimestamp(0)
|
||||
|
||||
return date_to_format.isoformat(' ', 'minutes')
|
||||
|
||||
|
||||
def boldify(str_to_replace, keyword):
|
||||
if keyword:
|
||||
return re.sub('(%s)' % keyword, r'<b>\1</b>', str_to_replace, flags=re.IGNORECASE)
|
||||
else:
|
||||
return str_to_replace
|
||||
|
||||
|
||||
def check_blacklist_words(url):
|
||||
return any(word.lower() in url.lower() for word in BLACKLIST_WORDS)
|
||||
|
||||
|
||||
def check_if_vf(title):
|
||||
return any(word.lower() in title.lower() for word in ['vf', 'multi', 'french'])
|
|
@ -0,0 +1,49 @@
|
|||
import asyncio
|
||||
from datetime import datetime
|
||||
|
||||
from flask import Flask, redirect, render_template, request, url_for
|
||||
|
||||
from pynyaata2.bridge import search_all
|
||||
from pynyaata2.translations import current_lang, i18n
|
||||
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
|
||||
@app.context_processor
|
||||
def utility_processor():
|
||||
current_date = datetime.now()
|
||||
apocalypse_left = (
|
||||
datetime(current_date.year + 5 - (current_date.year % 5), 6, 4, 0, 1)
|
||||
- current_date
|
||||
)
|
||||
|
||||
return dict(
|
||||
_=i18n,
|
||||
current_lang=current_lang(),
|
||||
apocalypse_left=apocalypse_left,
|
||||
)
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def index():
|
||||
return render_template("index.html.j2")
|
||||
|
||||
|
||||
@app.route("/search")
|
||||
def search():
|
||||
query = request.args.get("q")
|
||||
if not query:
|
||||
return redirect(url_for("index"))
|
||||
|
||||
torrents = asyncio.run(search_all(query))
|
||||
|
||||
return render_template("search.html.j2", torrents=torrents)
|
||||
|
||||
|
||||
def run():
|
||||
app.run("0.0.0.0")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
run()
|
|
@ -0,0 +1,3 @@
|
|||
from pynyaata2 import run
|
||||
|
||||
run()
|
|
@ -0,0 +1,25 @@
|
|||
from asyncio import create_task, gather
|
||||
from typing import List
|
||||
|
||||
from pynyaata2.bridge.animeultime import AnimeUltime
|
||||
from pynyaata2.bridge.nyaa import EraiRaws, Nyaa
|
||||
from pynyaata2.bridge.yggtorrent import YggAnimation, YggTorrent
|
||||
from pynyaata2.types import Bridge, RemoteFile
|
||||
|
||||
|
||||
BRIDGES: List[Bridge] = [
|
||||
Nyaa(),
|
||||
EraiRaws(),
|
||||
YggTorrent(),
|
||||
YggAnimation(),
|
||||
AnimeUltime(),
|
||||
]
|
||||
|
||||
|
||||
async def search_all(query: str = "", page: int = 1) -> List[RemoteFile]:
|
||||
tasks = []
|
||||
|
||||
for bridge in BRIDGES:
|
||||
tasks.append(create_task(bridge.search(query, page)))
|
||||
|
||||
return await gather(*tasks, return_exceptions=True)
|
|
@ -0,0 +1,133 @@
|
|||
from datetime import datetime, timedelta
|
||||
from typing import List
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from pydantic import HttpUrl, parse_obj_as
|
||||
|
||||
from pynyaata2.cache import cache_data
|
||||
from pynyaata2.filters import filter_data
|
||||
from pynyaata2.types import Bridge, Color, RemoteFile, async_wrap
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
MONTHS = [
|
||||
"Janvier",
|
||||
"Février",
|
||||
"Mars",
|
||||
"Avril",
|
||||
"Mai",
|
||||
"Juin",
|
||||
"Juillet",
|
||||
"Août",
|
||||
"Septembre",
|
||||
"Octobre",
|
||||
"Novembre",
|
||||
"Décembre",
|
||||
]
|
||||
|
||||
|
||||
class AnimeUltime(Bridge):
|
||||
color = Color.WARNING
|
||||
title = "Anime-Ultime"
|
||||
base_url = parse_obj_as(HttpUrl, "http://www.anime-ultime.net")
|
||||
favicon = parse_obj_as(HttpUrl, f"{base_url}/favicon.ico")
|
||||
|
||||
def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
|
||||
try:
|
||||
page_date = datetime.now() - timedelta((page - 1) * 365 / 12)
|
||||
except OverflowError:
|
||||
page_date = datetime.fromtimestamp(0)
|
||||
|
||||
return parse_obj_as(
|
||||
HttpUrl,
|
||||
(
|
||||
f"{self.base_url}/"
|
||||
f"{'search' if query else 'history'}-0-1/"
|
||||
f"{page_date.strftime('%m%Y') if not query else ''}"
|
||||
),
|
||||
)
|
||||
|
||||
@async_wrap
|
||||
@cache_data
|
||||
@filter_data
|
||||
def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
|
||||
response = (
|
||||
requests.post(self.search_url(query, page), {"search": query})
|
||||
if query
|
||||
else requests.get(self.search_url(query, page))
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise requests.HTTPError(response)
|
||||
|
||||
torrents: List[RemoteFile] = []
|
||||
html = BeautifulSoup(response.content, "html.parser")
|
||||
title = html.select_one("div.title")
|
||||
titre = html.select_one("div.titre")
|
||||
history = html.select_one("h1")
|
||||
player = html.select_one("div.AUVideoPlayer")
|
||||
tables = html.select("table.jtable")
|
||||
|
||||
if title and "Recherche" in title.get_text():
|
||||
trs = html.select("table.jtable tr")
|
||||
|
||||
for i, tr in enumerate(trs):
|
||||
if not i:
|
||||
continue
|
||||
|
||||
tds = tr.find_all("td")
|
||||
|
||||
if tds:
|
||||
torrents.append(
|
||||
RemoteFile(
|
||||
bridge=self.__class__.__name__,
|
||||
id=tds[0].a["href"].split("/")[1].split("-")[0],
|
||||
category=tds[1].get_text(),
|
||||
name=tds[0].get_text(),
|
||||
link=f"{self.base_url}/{tds[0].a['href']}",
|
||||
)
|
||||
)
|
||||
elif history and "Historique" in history.get_text():
|
||||
h3s = html.findAll("h3")
|
||||
|
||||
for i, table in enumerate(tables):
|
||||
for j, tr in enumerate(table.find_all("tr")):
|
||||
if not j:
|
||||
continue
|
||||
|
||||
tds = tr.find_all("td")
|
||||
|
||||
if tds[0].a["href"] != "#":
|
||||
date = h3s[i].get_text()[:-3].split(" ")
|
||||
|
||||
torrents.append(
|
||||
RemoteFile(
|
||||
bridge=self.__class__.__name__,
|
||||
id=tds[0].a["href"].split("/")[-2],
|
||||
category=tds[4].get_text(),
|
||||
name=tds[0].get_text(),
|
||||
link=f"{self.base_url}/{tds[0].a['href']}",
|
||||
date=datetime(
|
||||
int(date[3]),
|
||||
MONTHS.index(date[2]) + 1,
|
||||
int(date[1]),
|
||||
),
|
||||
)
|
||||
)
|
||||
elif player and titre and history:
|
||||
torrents.append(
|
||||
RemoteFile(
|
||||
bridge=self.__class__.__name__,
|
||||
id=player["data-serie"],
|
||||
category=titre.get_text().split("(")[0].strip(),
|
||||
name=history.get_text(),
|
||||
link=f"{self.base_url}/file-0-1/{player['data-serie']}",
|
||||
date=datetime.strptime(
|
||||
tables[0].find_all("tr")[1].find_all("td")[1].get_text(),
|
||||
"%d/%m/%y",
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
return torrents
|
|
@ -0,0 +1,105 @@
|
|||
from datetime import datetime
|
||||
from os import getenv
|
||||
from typing import List
|
||||
from urllib import parse
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from pydantic import HttpUrl, parse_obj_as
|
||||
|
||||
from pynyaata2.cache import cache_data
|
||||
from pynyaata2.filters import filter_data
|
||||
from pynyaata2.types import Bridge, Color, RemoteFile, async_wrap
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
VF_WORDS = getenv("VF_WORDS", "vf,vostfr,multi,fre").split(",")
|
||||
|
||||
|
||||
class Nyaa(Bridge):
    """Bridge for the nyaa.si tracker, searching French-audience releases."""

    color = Color.INFO
    title = "Nyaa"
    base_url = parse_obj_as(HttpUrl, "https://nyaa.si")
    favicon = parse_obj_as(HttpUrl, f"{base_url}/static/favicon.png")

    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
        """Build the search URL for *query* on result page *page*.

        The query is expanded into one sub-query per French-related keyword
        (VF_WORDS), OR-ed together with nyaa's "(a)|(b)" syntax.
        """
        to_query = "|".join(map(lambda word: f"({query} {word})", VF_WORDS))
        params = parse.urlencode(
            {
                "f": 0,
                "c": "1_3",
                "q": to_query,
                # Biggest first when searching, newest first when browsing.
                "s": "size" if query else "id",
                "o": "desc",
                "p": page,
            }
        )

        return parse_obj_as(HttpUrl, f"{self.base_url}?{params}")

    @async_wrap
    @cache_data
    @filter_data
    def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
        """Scrape one result page and return its torrents.

        Raises requests.HTTPError on a non-200 response.
        """
        response = requests.get(self.search_url(query, page))

        if response.status_code != 200:
            raise requests.HTTPError(response)

        torrents: List[RemoteFile] = []
        html = BeautifulSoup(response.content, "html.parser")
        trs = html.select("table.torrent-list tr")

        # Pagination widget: the second-to-last <li> holds the last page
        # number.  Hoisted out of the row loop (it is identical for every
        # row — previously re-queried and re-decomposed each iteration);
        # the "current page" <span> marker is stripped before reading it.
        nb_pages = html.select("ul.pagination li")[-2]
        current = nb_pages.select_one("span")
        if current:
            current.decompose()

        for i, tr in enumerate(trs):
            if not i:
                continue  # skip the table header row

            tds = tr.find_all("td")
            urls = tds[1].find_all("a")
            links = tds[2].find_all("a")
            # When a torrent has comments, urls[0] is the comments link and
            # urls[1] the torrent page; otherwise urls[0] is the torrent page.
            main = urls[1 if len(urls) > 1 else 0]

            torrents.append(
                RemoteFile(
                    bridge=self.__class__.__name__,
                    id=main["href"].split("/")[-1],
                    category=tds[0].a["title"],
                    color=Color[tr["class"][0].upper()],
                    name=main.get_text(),
                    link=f"{self.base_url}{main['href']}",
                    comment=urls[0].get_text() if len(urls) > 1 else 0,
                    comment_url=f"{self.base_url}{urls[0]['href']}",
                    magnet=links[1]["href"],
                    torrent=f"{self.base_url}{links[0]['href']}",
                    size=tds[3].get_text(),
                    date=datetime.fromtimestamp(int(tds[4]["data-timestamp"])),
                    seeds=tds[5].get_text(),
                    leechs=tds[6].get_text(),
                    downloads=tds[7].get_text(),
                    nb_pages=nb_pages.get_text(),
                )
            )

        return torrents
|
||||
|
||||
|
||||
class EraiRaws(Nyaa):
    """Nyaa bridge restricted to the Erai-raws uploader account."""

    title = "Erai-raws"

    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
        """Build the search URL scoped to the Erai-raws user page."""
        # Erai-raws tags French subtitles with "fre", so a single keyword
        # suffices here (no VF_WORDS expansion as in the parent class).
        criteria = {
            "f": 0,
            "c": "1_2",
            "q": f"{query} fre",
            "s": "size" if query else "id",
            "o": "desc",
            "p": page,
        }
        encoded = parse.urlencode(criteria)

        return parse_obj_as(HttpUrl, f"{self.base_url}/user/Erai-raws?{encoded}")
|
|
@ -0,0 +1,91 @@
|
|||
from os import getenv
|
||||
from typing import List
|
||||
from urllib import parse
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from pydantic import HttpUrl, parse_obj_as
|
||||
|
||||
from pynyaata2.cache import cache_data
|
||||
from pynyaata2.filters import filter_data
|
||||
from pynyaata2.session import requests
|
||||
from pynyaata2.types import Bridge, Color, RemoteFile, async_wrap
|
||||
|
||||
from requests import HTTPError
|
||||
|
||||
|
||||
YGG_IP = getenv("YGG_IP")
|
||||
YGG_DOMAIN = "www3.yggtorrent.do"
|
||||
|
||||
|
||||
class YggTorrent(Bridge):
    """Bridge for the YggTorrent tracker (anime series sub-category).

    When the YGG_IP environment variable is set, HTTP calls go to that IP
    directly with a Host header for YGG_DOMAIN (see search()), presumably to
    bypass DNS-level blocking of the public domain.
    """

    color = Color.SUCCESS
    title = "YggTorrent"
    base_url = parse_obj_as(HttpUrl, f"https://{YGG_DOMAIN}")
    # URL actually queried by search(); falls back to base_url without YGG_IP.
    hidden_url = parse_obj_as(HttpUrl, f"http://{YGG_IP}") if YGG_IP else base_url
    favicon = parse_obj_as(HttpUrl, f"{base_url}/favicon.ico")
    category = "Animation Série"
    sub_category = 2179

    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
        """Build the engine/search URL for *query*.

        NOTE(review): `page` is accepted but never encoded into the query
        string, so every page yields the same results — confirm whether an
        offset parameter should be added here.
        """
        params = parse.urlencode(
            {
                "name": query,
                "category": 2145,
                "sub_category": self.sub_category,
                "do": "search",
                "order": "desc",
                # Biggest first when searching, newest first when browsing.
                "sort": "size" if query else "publish_date",
            }
        )

        return parse_obj_as(HttpUrl, f"{self.base_url}/engine/search?{params}")

    @async_wrap
    @cache_data
    @filter_data
    def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
        """Scrape one result page and return its torrents.

        Raises requests.HTTPError on a non-200 response.
        """
        search_url = self.search_url(query, page)
        # Query the direct IP (hidden_url) while keeping the real domain in
        # the Host header so the tracker serves the right virtual host.
        response = requests.get(
            f"{self.hidden_url}/{search_url.path}?{search_url.query}",
            headers={"Host": YGG_DOMAIN},
        )

        if response.status_code != 200:
            raise HTTPError(response)

        torrents: List[RemoteFile] = []
        html = BeautifulSoup(response.content, "html.parser")
        trs = html.select("table.table tr")

        for i, tr in enumerate(trs):
            if not i:  # skip the table header row
                continue

            tds = tr.find_all("td")

            torrents.append(
                RemoteFile(
                    bridge=self.__class__.__name__,
                    id=tds[0].div.get_text(),
                    category=self.category,
                    name=tds[1].get_text(),
                    link=tds[1].a["href"],
                    comment=tds[3].get_text(),
                    comment_url=f"{tds[1].a['href']}#comm",
                    torrent=f"{self.base_url}/engine/download_torrent?id={tds[0].div.get_text()}",
                    size=tds[5].get_text(),
                    date=tds[4].div.get_text(),
                    seeds=tds[7].get_text(),
                    leechs=tds[8].get_text(),
                    downloads=tds[6].get_text(),
                    nb_pages=html.select("ul.pagination li")[-2].get_text(),
                )
            )

        return torrents
|
||||
|
||||
|
||||
class YggAnimation(YggTorrent):
    # NOTE(review): category/sub_category are identical to YggTorrent's
    # values, making this subclass differ only by title — presumably it was
    # meant to target a different sub-category; confirm the intended ids.
    title = "YggAnimation"
    category = "Animation Série"
    sub_category = 2179
|
|
@ -0,0 +1,39 @@
|
|||
from functools import wraps
|
||||
from logging import error
|
||||
from os import getenv
|
||||
from typing import Optional
|
||||
|
||||
from pynyaata2.cache.simple import SimpleCache
|
||||
from pynyaata2.types import Bridge, Cache
|
||||
|
||||
from redis import RedisError
|
||||
|
||||
|
||||
REDIS_URL: Optional[str] = getenv("REDIS_URL")
|
||||
|
||||
client: Cache = SimpleCache()
|
||||
|
||||
if REDIS_URL:
|
||||
try:
|
||||
from pynyaata2.cache.redis import RedisCache
|
||||
|
||||
client = RedisCache()
|
||||
except RedisError as e:
|
||||
error(e)
|
||||
|
||||
|
||||
def cache_data(f):
    """Decorator memoising a bridge search in the configured cache backend.

    The cache key combines the bridge class, the wrapped function's name,
    the query and the page number.
    """

    @wraps(f)
    def wrapper(bridge: Bridge, query: str = "", page: int = 1):
        key = f"pynyaata.{bridge.__class__.__name__}.{f.__name__}.{query}.{page}"
        hit = client.get(key)

        if hit:
            return hit

        results = f(bridge, query, page)
        client.set(key, results)

        return results

    return wrapper
|
|
@ -0,0 +1,25 @@
|
|||
from json import dumps, loads
|
||||
from os import getenv
|
||||
from typing import List, Optional
|
||||
|
||||
from pynyaata2.types import Cache, RemoteFile
|
||||
|
||||
from redis import ConnectionError, Redis
|
||||
|
||||
|
||||
CACHE_TIMEOUT = int(getenv("CACHE_TIMEOUT", 60 * 60))
|
||||
REDIS_URL: Optional[str] = getenv("REDIS_URL")
|
||||
|
||||
if not REDIS_URL:
|
||||
raise ConnectionError(f"Invalid REDIS_URL: {REDIS_URL}")
|
||||
|
||||
client = Redis.from_url(REDIS_URL)
|
||||
|
||||
|
||||
class RedisCache(Cache):
    """Cache backend storing search results as JSON in Redis."""

    def get(self, key: str) -> Optional[List[RemoteFile]]:
        """Return the cached results for *key*, or None on a miss."""
        data = client.get(key)
        # Redis returns raw bytes; json.loads accepts bytes directly.  The
        # previous str(data) produced "b'...'", which is not valid JSON.
        return [RemoteFile(**obj) for obj in loads(data)] if data else None

    def set(self, key: str, data: List[RemoteFile]):
        """Serialise *data* to JSON and store it with the configured TTL."""
        # Pydantic models are not natively JSON-serialisable: dump each one
        # to a dict first, stringifying remaining values such as datetimes
        # (pydantic re-parses them in get()).
        payload = dumps([item.dict() for item in data], default=str)
        return client.set(key, payload, CACHE_TIMEOUT)
|
|
@ -0,0 +1,25 @@
|
|||
from datetime import datetime, timedelta
|
||||
from os import getenv
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
|
||||
from pynyaata2.types import Cache, RemoteFile
|
||||
|
||||
|
||||
CACHE_TIMEOUT = int(getenv("CACHE_TIMEOUT", 60 * 60))
|
||||
CACHE_DATA: Dict[str, Tuple[List[RemoteFile], datetime]] = {}
|
||||
|
||||
|
||||
class SimpleCache(Cache):
    """In-process cache backend backed by a module-level dict."""

    def get(self, key: str) -> Optional[List[RemoteFile]]:
        """Return cached results for *key*, evicting the entry when stale."""
        entry = CACHE_DATA.get(key)

        if entry is None:
            return None

        data, stored_at = entry
        expiry = stored_at + timedelta(seconds=CACHE_TIMEOUT)

        if datetime.now() < expiry:
            return data

        # Entry expired: drop it so it is not checked again.
        CACHE_DATA.pop(key)
        return None

    def set(self, key: str, data: List[RemoteFile]):
        """Store *data* together with the insertion timestamp."""
        CACHE_DATA[key] = (data, datetime.now())
|
|
@ -0,0 +1,81 @@
|
|||
from functools import wraps
|
||||
from os import getenv
|
||||
from typing import List
|
||||
|
||||
from pynyaata2.types import Color, RemoteFile
|
||||
|
||||
|
||||
def duplicate(remotes: List[RemoteFile]) -> List[RemoteFile]:
    """Drop remotes whose id was already seen, keeping the first occurrence."""
    seen_ids = set()  # set membership is O(1); the previous list was O(n) per check
    dedup_remotes: List[RemoteFile] = []

    for remote in remotes:
        if remote.id not in seen_ids:
            dedup_remotes.append(remote)
            seen_ids.add(remote.id)

    return dedup_remotes
|
||||
|
||||
|
||||
def inactive(remotes: List[RemoteFile]) -> List[RemoteFile]:
    """Keep only remotes showing some activity (seeders or past downloads)."""
    return [
        remote
        for remote in remotes
        if remote.seeds != 0 or remote.downloads != 0
    ]
|
||||
|
||||
|
||||
def blacklist(remotes: List[RemoteFile]) -> List[RemoteFile]:
    """Drop remotes whose name contains any configured blacklisted word.

    BLACKLIST_WORDS is a comma-separated environment variable; an empty or
    unset value disables the filter.
    """
    BLACKLIST_WORDS = getenv("BLACKLIST_WORDS", "")

    if not BLACKLIST_WORDS:
        return remotes

    # Compare case-insensitively on both sides: remote.name is lowercased,
    # so the configured words must be lowercased too (previously any word
    # containing an uppercase letter could never match).
    words = [word.lower() for word in BLACKLIST_WORDS.split(",") if word]

    return [
        remote
        for remote in remotes
        if not any(word in remote.name.lower() for word in words)
    ]
|
||||
|
||||
|
||||
def danger(remotes: List[RemoteFile]) -> List[RemoteFile]:
    """Drop remotes flagged with the DANGER colour."""
    return [remote for remote in remotes if remote.color != Color.DANGER]
|
||||
|
||||
|
||||
def trusted(remotes: List[RemoteFile]) -> List[RemoteFile]:
    """Recolour remotes whose name matches a configured trusted word.

    TRUSTED_WORDS is a comma-separated environment variable; matching
    remotes get Color.PRIMARY.  The list is returned mutated in place.
    """
    TRUSTED_WORDS = getenv("TRUSTED_WORDS", "")

    # Lowercase both sides: remote.name is lowercased, so a configured word
    # containing an uppercase letter could otherwise never match.
    words = [word.lower() for word in TRUSTED_WORDS.split(",") if word]

    for remote in remotes:
        if any(word in remote.name.lower() for word in words):
            remote.color = Color.PRIMARY

    return remotes
|
||||
|
||||
|
||||
def filter_data(f):
    """Decorator running every post-processing filter on search results."""

    @wraps(f)
    def wrapper(*args, **kwargs):
        results = f(*args, **kwargs)

        # Same pipeline order as before: dedup, prune, then recolour.
        for stage in (duplicate, inactive, blacklist, danger, trusted):
            results = stage(results)

        return results

    return wrapper
|
|
@ -0,0 +1,77 @@
|
|||
from io import BytesIO
|
||||
from os import getenv
|
||||
from urllib import parse
|
||||
|
||||
from charset_normalizer import detect
|
||||
from requests import RequestException, Response, Session, post
|
||||
|
||||
|
||||
CLOUDPROXY_ENDPOINT = getenv("CLOUDPROXY_ENDPOINT")
|
||||
|
||||
|
||||
class FlareRequests(Session):
    """Requests Session that proxies through FlareSolverr when configured.

    When CLOUDPROXY_ENDPOINT is unset this behaves exactly like a plain
    Session.  Otherwise every request is forwarded to the FlareSolverr API,
    reusing an existing solver session when one is available.
    """

    def request(self, method, url, params=None, data=None, **kwargs):
        if not CLOUDPROXY_ENDPOINT:
            return super().request(method, url, params, data, **kwargs)

        # Reuse the first existing FlareSolverr session, or create one.
        sessions = post(CLOUDPROXY_ENDPOINT, json={"cmd": "sessions.list"}).json()

        if "sessions" in sessions and len(sessions["sessions"]) > 0:
            FLARE_SESSION = sessions["sessions"][0]
        else:
            response = post(CLOUDPROXY_ENDPOINT, json={"cmd": "sessions.create"})
            session = response.json()

            if "session" in session:
                FLARE_SESSION = session["session"]
            else:
                raise RequestException(response)

        # FlareSolverr takes the full URL: fold the query string into it.
        if params:
            url += "&" if len(url.split("?")) > 1 else "?"
            url = f"{url}{parse.urlencode(params)}"

        post_data = {
            "cmd": f"request.{method.lower()}",
            "session": FLARE_SESSION,
            "url": url,
        }

        if data:
            post_data["postData"] = parse.urlencode(data)

        try:
            response = post(
                CLOUDPROXY_ENDPOINT,
                json=post_data,
            )

            content = response.json()

            if "solution" in content and content["solution"]:
                # Rebuild a requests.Response from the FlareSolverr payload.
                solution = content["solution"]
                raw = solution["response"].encode()
                encoding = detect(raw)

                resolved = Response()
                resolved.status_code = solution["status"]
                resolved.headers = solution["headers"]
                resolved.raw = BytesIO(raw)
                resolved.url = url
                resolved.encoding = encoding["encoding"]
                resolved.reason = content["status"]
                resolved.cookies = solution["cookies"]

                return resolved

            raise RequestException(content["message"], response=response)
        except RequestException:
            # Destroy the (possibly broken) solver session, then re-raise the
            # original exception.  The previous code rebuilt the error from
            # `content`, which is unbound when post() itself fails, turning
            # the real error into a NameError.
            post(
                CLOUDPROXY_ENDPOINT,
                json={"cmd": "sessions.destroy", "session": FLARE_SESSION},
            )

            raise
|
||||
|
||||
|
||||
requests = FlareRequests()
|
|
@ -0,0 +1,7 @@
|
|||
.title {
|
||||
letter-spacing: 0.5rem;
|
||||
}
|
||||
|
||||
.navbar-menu {
|
||||
box-shadow: none;
|
||||
}
|
Before Width: | Height: | Size: 434 KiB After Width: | Height: | Size: 434 KiB |
|
@ -0,0 +1,20 @@
|
|||
{% extends "layout.html.j2" %}
|
||||
{% block body %}
|
||||
<section class="hero is-fullheight-with-navbar">
|
||||
<div class="hero-body m-auto is-flex-direction-column is-justify-content-center">
|
||||
<p class="title mb-6 is-size-1">{{ _("𝚷😼た") }}</p>
|
||||
<form action="{{ url_for('search') }}" class="subtitle">
|
||||
<div class="field has-addons">
|
||||
<div class="control">
|
||||
{{ search_form.q(placeholder=_("Search animes ..."), class="input", value=request.args.get("q", "")) }}
|
||||
</div>
|
||||
<div class="control">
|
||||
<button type="submit" class="button">
|
||||
<i class="fa fa-search"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</section>
|
||||
{% endblock body %}
|
|
@ -0,0 +1,75 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="{{ current_lang }}">
|
||||
<head>
|
||||
<meta charset="utf-8"/>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1"/>
|
||||
{% block head %}
|
||||
<meta name="description" content="" />
|
||||
<meta name="keywords" content="" />
|
||||
<title>{{ _("PyNyaaTa") }}</title>
|
||||
{% endblock head %}
|
||||
<link rel="stylesheet"
|
||||
href="{{ url_for('static', filename='css/bulma.min.css') }}"/>
|
||||
<link rel="stylesheet"
|
||||
href="{{ url_for('static', filename='css/bulma-prefers-dark.min.css') }}"/>
|
||||
<link rel="stylesheet"
|
||||
href="{{ url_for('static', filename='css/bulma-tooltip.min.css') }}"/>
|
||||
<link rel="stylesheet"
|
||||
href="{{ url_for('static', filename='css/font-awesome.min.css') }}"/>
|
||||
<link rel="stylesheet"
|
||||
href="{{ url_for('static', filename='css/styles.css') }}"/>
|
||||
</head>
|
||||
<body>
|
||||
{% if request.endpoint != "index" %}
|
||||
<nav class="navbar" role="navigation" aria-label="main navigation">
|
||||
<div class="navbar-brand">
|
||||
<a class="navbar-item" href="{{ url_for('index') }}">{{ _("𝚷😼た") }}</a>
|
||||
</div>
|
||||
<div class="navbar-start">
|
||||
{% block links %}
|
||||
{% endblock links %}
|
||||
</div>
|
||||
<div class="navbar-menu">
|
||||
<div class="navbar-end">
|
||||
<form action="{{ url_for('search') }}" class="navbar-item">
|
||||
<div class="field has-addons">
|
||||
<div class="control">
|
||||
{{ search_form.q(placeholder=_("Search animes ..."), class="input", value=request.args.get("q", "")) }}
|
||||
</div>
|
||||
<div class="control">
|
||||
<button type="submit" class="button">
|
||||
<i class="fa fa-search"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
{% endif %}
|
||||
{% block body %}
|
||||
{% endblock body %}
|
||||
<footer class="has-text-centered is-size-7">
|
||||
<span class="mr-2">
|
||||
<i class="fa fa-user-plus"></i>
|
||||
<a href="https://discord.com/users/133305654512320513" target="_blank">{{ _("Discord") }}</a>
|
||||
</span>
|
||||
<span class="mr-2">
|
||||
<i class="fa fa-book"></i>
|
||||
{# djlint:off H022 #}<a href="http://www.wtfpl.net" target="_blank">{{ _("Licence") }}</a>{# djlint:on #}
|
||||
</span>
|
||||
<span class="mr-2">
|
||||
<i class="fa fa-code-fork"></i>
|
||||
<a href="https://git.crystalyx.net/Xefir/PyNyaaTa" target="_blank">{{ _("Code") }}</a>
|
||||
</span>
|
||||
<span class="mr-2">
|
||||
<i class="fa fa-cubes"></i>
|
||||
<a href="https://hub.docker.com/r/xefir/pynyaata" target="_blank">{{ _("Docker") }}</a>
|
||||
</span>
|
||||
<br />
|
||||
<i class="fa fa-heartbeat"></i>
|
||||
{{ _("Next castastrophic life failure in about %s days", apocalypse_left.days) }}
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,5 @@
|
|||
{% extends "layout.html.j2" %}
|
||||
|
||||
{% block body %}
|
||||
|
||||
{% endblock body %}
|
|
@ -0,0 +1,27 @@
|
|||
from json import load
|
||||
from os import path
|
||||
|
||||
from flask import request
|
||||
|
||||
|
||||
CATALOG_CACHE = {}
|
||||
|
||||
|
||||
def current_lang():
    """Return the best-matching UI language ("en" or "fr") for the request.

    Relies on Flask's request context; must be called while handling a request.
    """
    return request.accept_languages.best_match(["en", "fr"])
|
||||
|
||||
|
||||
def i18n(string: str, *args: str) -> str:
    """Translate *string* into the request language and %-interpolate *args*.

    Catalogs are JSON files named <lang>.json next to this module, loaded
    once and memoised in CATALOG_CACHE.  English is the source language and
    needs no catalog; unknown strings fall through untranslated.
    """
    lang = current_lang()

    # Lazily load the catalog for this language on first use.
    if lang != "en" and lang not in CATALOG_CACHE:
        catalog_file = f"{path.dirname(__file__)}/{lang}.json"
        if path.exists(catalog_file):
            with open(catalog_file) as catalog_json:
                CATALOG_CACHE[lang] = load(catalog_json)

    catalog = CATALOG_CACHE.get(lang, {})

    if string in catalog:
        return catalog[string] % args

    return string % args
|
|
@ -0,0 +1,4 @@
|
|||
{
|
||||
"Search animes ...": "Recherche d'animes ...",
|
||||
"Next castastrophic life failure in about %s days": "%s jours avant la prochaine catastrophe"
|
||||
}
|
|
@ -0,0 +1,78 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from asyncio import get_event_loop
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from functools import partial, wraps
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import BaseModel, ByteSize, HttpUrl
|
||||
|
||||
|
||||
class Color(str, Enum):
    """Bulma-style colour names used to tag bridges and torrent rows."""

    WHITE = "white"
    BLACK = "black"
    LIGHT = "light"
    DARK = "dark"
    PRIMARY = "primary"
    LINK = "link"
    INFO = "info"
    SUCCESS = "success"
    WARNING = "warning"
    DANGER = "danger"
    DEFAULT = "default"
|
||||
|
||||
|
||||
class RemoteFile(BaseModel):
    """A single torrent/file entry produced by a bridge search."""

    bridge: str  # name of the Bridge subclass that produced this entry
    id: int
    category: str
    color: Optional[Color]  # row colour, when the tracker reports a status
    name: str
    link: HttpUrl  # details page on the tracker
    comment: int = 0  # number of comments
    comment_url: Optional[HttpUrl]
    magnet: Optional[str]
    torrent: Optional[HttpUrl]  # direct .torrent download link
    size: Optional[ByteSize]
    date: Optional[datetime]
    seeds: Optional[int]
    leechs: Optional[int]
    downloads: Optional[int]
    nb_pages: int = 1  # total result pages reported by the tracker
|
||||
|
||||
|
||||
class Bridge(BaseModel, ABC):
    """Base class for a torrent-site scraper.

    Subclasses provide display metadata (colour, title, favicon, base URL)
    and implement search_url() and search().
    """

    color: Color
    title: str
    favicon: HttpUrl
    base_url: HttpUrl

    @abstractmethod
    def search_url(self, query: str = "", page: int = 1) -> HttpUrl:
        """Build the URL queried for *query* on result page *page*."""
        pass

    @abstractmethod
    async def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
        """Fetch and parse one result page into RemoteFile entries."""
        pass
|
||||
|
||||
|
||||
class Cache(ABC):
    """Interface for a search-result cache backend (see cache_data)."""

    @abstractmethod
    def get(self, key: str) -> Optional[List[RemoteFile]]:
        """Return cached results for *key*, or None on a miss."""
        pass

    @abstractmethod
    def set(self, key: str, data: List[RemoteFile]):
        """Store *data* under *key*."""
        pass
|
||||
|
||||
|
||||
def async_wrap(func):
    """Wrap a blocking function into a coroutine run in an executor.

    The wrapped coroutine accepts optional keyword-only ``loop`` and
    ``executor`` arguments; by default it uses the running event loop and
    its default thread-pool executor.
    """

    @wraps(func)
    async def run(*args, loop=None, executor=None, **kwargs):
        if loop is None:
            # get_running_loop() is the supported call inside a coroutine;
            # get_event_loop() is deprecated in that role since Python 3.10.
            # Local import keeps this change self-contained.
            from asyncio import get_running_loop

            loop = get_running_loop()

        pfunc = partial(func, *args, **kwargs)
        return await loop.run_in_executor(executor, pfunc)

    return run
|
|
@ -0,0 +1,53 @@
|
|||
[tool.poetry]
|
||||
name = "pynyaata2"
|
||||
version = "2.0.0"
|
||||
description = "π 😼た, Xéfir's personal anime torrent search engine"
|
||||
authors = ["Xéfir Destiny"]
|
||||
license = "WTFPL"
|
||||
readme = "README.md"
|
||||
homepage = "https://nyaa.crystalyx.net/"
|
||||
repository = "https://git.crystalyx.net/Xefir/PyNyaaTa"
|
||||
|
||||
|
||||
[tool.poetry.scripts]
|
||||
pynyaata = 'pynyaata:run'
|
||||
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.8.1"
|
||||
beautifulsoup4 = "^4.12.2"
|
||||
charset-normalizer = "^3.1.0"
|
||||
flask = "^2.3.2"
|
||||
pydantic = "^1.10.8"
|
||||
redis = "^4.5.5"
|
||||
requests = "^2.31.0"
|
||||
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
black = "^23.3.0"
|
||||
flake8-alphabetize = "^0.0.21"
|
||||
flake8-black = "^0.3.6"
|
||||
mypy = "^1.3.0"
|
||||
pytest = "^7.3.1"
|
||||
pytest-asyncio = "^0.21.0"
|
||||
requests-mock = "^1.10.0"
|
||||
types-beautifulsoup4 = "^4.12.0.5"
|
||||
types-redis = "^4.5.5.2"
|
||||
types-requests = "^2.31.0.1"
|
||||
djlint = "^1.30.2"
|
||||
flake8 = "^6.0.0"
|
||||
pydantic-factories = "^1.17.3"
|
||||
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
|
||||
[tool.djlint]
|
||||
extension = "j2"
|
||||
profile = "jinja"
|
||||
|
||||
|
||||
[tool.mypy]
|
||||
plugins = ["pydantic.mypy"]
|
|
@ -1,3 +0,0 @@
|
|||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json"
|
||||
}
|
|
@ -1,14 +0,0 @@
|
|||
Flask==2.3.3
|
||||
Flask-SQLAlchemy==2.5.1
|
||||
Flask-HTTPAuth==4.8.0
|
||||
Flask-WTF==1.1.2
|
||||
WTForms==2.3.3
|
||||
PyMySQL==1.1.0
|
||||
pg8000==1.30.2
|
||||
requests==2.31.0
|
||||
charset-normalizer==3.2.0
|
||||
beautifulsoup4==4.12.2
|
||||
python-dotenv==1.0.0
|
||||
dateparser==1.1.8
|
||||
redis==5.0.1
|
||||
transmission-rpc==7.0.1
|
5
run.py
|
@ -1,5 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
from pynyaata import run
|
||||
|
||||
if __name__ == "__main__":
|
||||
run()
|
31
setup.py
|
@ -1,31 +0,0 @@
|
|||
from datetime import datetime
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
with open("README.md") as readme_file:
|
||||
long_description = readme_file.read()
|
||||
|
||||
with open("requirements.txt") as requirements_file:
|
||||
requirements = requirements_file.read().splitlines()
|
||||
|
||||
setup(
|
||||
name="PyNyaaTa",
|
||||
version=datetime.now().strftime("%Y%m%d%H%M"),
|
||||
author="Xéfir Destiny",
|
||||
author_email="xefir@crystalyx.net",
|
||||
description="π 😼た, Xéfir's personal animes torrent search engine",
|
||||
long_description=long_description,
|
||||
long_description_content_type="text/markdown",
|
||||
url="https://git.crystalyx.net/Xefir/PyNyaaTa",
|
||||
packages=find_packages(),
|
||||
install_requires=requirements,
|
||||
include_package_data=True,
|
||||
entry_points={
|
||||
"console_scripts": ["pynyaata=pynyaata:run"],
|
||||
},
|
||||
classifiers=[
|
||||
"Programming Language :: Python :: 3",
|
||||
"Operating System :: OS Independent",
|
||||
],
|
||||
python_requires=">=3.5",
|
||||
)
|
|
@ -0,0 +1,174 @@
|
|||
from datetime import datetime, timedelta
|
||||
|
||||
from pynyaata2.bridge.animeultime import AnimeUltime
|
||||
from pynyaata2.types import RemoteFile
|
||||
|
||||
from pytest import mark
|
||||
import requests
|
||||
from requests_mock import Mocker
|
||||
|
||||
|
||||
def test_search_url():
    """search_url: empty query browses monthly history; a query uses the search form."""
    # Page 2 of the history view means "one month back" (approximated as 30 days).
    previous_month = datetime.now() - timedelta(days=30)

    assert (
        AnimeUltime().search_url()
        == f"http://www.anime-ultime.net/history-0-1/{datetime.now().strftime('%m%Y')}"
    )

    assert (
        AnimeUltime().search_url("", 2)
        == f"http://www.anime-ultime.net/history-0-1/{previous_month.strftime('%m%Y')}"
    )

    # With a query, the page number has no effect: the search form URL is fixed.
    assert AnimeUltime().search_url("test") == "http://www.anime-ultime.net/search-0-1/"

    assert (
        AnimeUltime().search_url("test", 2) == "http://www.anime-ultime.net/search-0-1/"
    )
|
||||
|
||||
|
||||
@mark.asyncio
|
||||
async def test_search():
|
||||
assert await AnimeUltime().search("test") == [
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=4631,
|
||||
category="OAV",
|
||||
name="Akuma no Riddle: Shousha wa Dare? Nukiuchi Test",
|
||||
link="http://www.anime-ultime.net/file-0-1/4631-Akuma-no-Riddle-Shousha-wa-Dare-Nukiuchi-Test",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=4250,
|
||||
category="OAV",
|
||||
name="Baka to Test to shoukanjuu (spécial Noël)",
|
||||
link="http://www.anime-ultime.net/file-0-1/4250-Baka-to-Test-to-shoukanjuu-special-Noel",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=3057,
|
||||
category="OAV",
|
||||
name="Baka to Test to shoukanjuu (spéciaux)",
|
||||
link="http://www.anime-ultime.net/file-0-1/3057-Baka-to-Test-to-shoukanjuu-speciaux",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=5453,
|
||||
category="OAV",
|
||||
name="Baka to Test to Shoukanjuu Mini Anime",
|
||||
link="http://www.anime-ultime.net/file-0-1/5453-Baka-to-Test-to-Shoukanjuu-Mini-Anime",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=2458,
|
||||
category="Episode",
|
||||
name="Baka to Test to shoukanjuu ni! [Saison 2]",
|
||||
link="http://www.anime-ultime.net/file-0-1/2458-Baka-to-Test-to-shoukanjuu-ni-Saison-2",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=1887,
|
||||
category="Episode",
|
||||
name="Baka to Test to shoukanjuu [Saison 1]",
|
||||
link="http://www.anime-ultime.net/file-0-1/1887-Baka-to-Test-to-shoukanjuu-Saison-1",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=2320,
|
||||
category="OAV",
|
||||
name="Baka to Test to Shoukanjuu: Matsuri",
|
||||
link="http://www.anime-ultime.net/file-0-1/2320-Baka-to-Test-to-Shoukanjuu-Matsuri",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=4463,
|
||||
category="OAV",
|
||||
name="Baka to Test to Shoukanjuu: Spinout! Sore ga Bokura no Nichijou",
|
||||
link="http://www.anime-ultime.net/file-0-1/4463-Baka-to-Test-to-Shoukanjuu-Spinout-Sore-ga-Bokura-no-Nichijou",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=5069,
|
||||
category="OAV",
|
||||
name="Shinmai Maou no Testament Burst OAV [Interdit -16 ans]",
|
||||
link="http://www.anime-ultime.net/file-0-1/5069-Shinmai-Maou-no-Testament-Burst-OAV-Interdit--16-ans",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=5487,
|
||||
category="OAV",
|
||||
name="Shinmai Maou no Testament Departures [Interdit -16 ans]",
|
||||
link="http://www.anime-ultime.net/file-0-1/5487-Shinmai-Maou-no-Testament-Departures-Interdit--16-ans",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=4830,
|
||||
category="OAV",
|
||||
name="Shinmai Maou no Testament OAD [Interdit -16 ans]",
|
||||
link="http://www.anime-ultime.net/file-0-1/4830-Shinmai-Maou-no-Testament-OAD-Interdit--16-ans",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=3449,
|
||||
category="Film",
|
||||
name="Robot Contest [J-Film]",
|
||||
link="http://www.anime-ultime.net/file-0-1/3449-Robot-Contest-J-Film",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=2879,
|
||||
category="OST",
|
||||
name="Baka to Test to shoukanjuu ni! [Saison 2]",
|
||||
link="http://www.anime-ultime.net/file-0-1/2879-Baka-to-Test-to-shoukanjuu-ni-Saison-2",
|
||||
),
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=1926,
|
||||
category="OST",
|
||||
name="Baka to Test to shoukanjuu [Saison 1]",
|
||||
link="http://www.anime-ultime.net/file-0-1/1926-Baka-to-Test-to-shoukanjuu-Saison-1",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@mark.asyncio
|
||||
async def test_history(requests_mock: Mocker):
|
||||
requests_mock.real_http = True
|
||||
requests_mock.get(
|
||||
AnimeUltime().search_url(),
|
||||
text=requests.get("http://www.anime-ultime.net/history-0-1/042006").text,
|
||||
)
|
||||
|
||||
assert await AnimeUltime().search() == [
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=167,
|
||||
category="Film",
|
||||
name="Crayon Shin-chan - Arashi wo Yobu Appare! Sengoku Dai Kassen",
|
||||
link="http://www.anime-ultime.net/file-0-1/167/Crayon-Shin-chan---Arashi-wo-Yobu-Appare-Sengoku-Dai-Kassen-vostfr",
|
||||
date="2006-04-10 00:00:00",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@mark.asyncio
|
||||
async def test_search_one(requests_mock: Mocker):
|
||||
requests_mock.real_http = True
|
||||
requests_mock.post(
|
||||
AnimeUltime().search_url("akuma"),
|
||||
text=requests.get(
|
||||
"http://www.anime-ultime.net/file-0-1/4631/Akuma-no-Riddle-Shousha-wa-Dare-Nukiuchi-Test-vostfr"
|
||||
).text,
|
||||
)
|
||||
|
||||
assert await AnimeUltime().search("akuma") == [
|
||||
RemoteFile(
|
||||
bridge="AnimeUltime",
|
||||
id=4631,
|
||||
category="OAV",
|
||||
name="Akuma no Riddle: Shousha wa Dare? Nukiuchi Test vostfr",
|
||||
link="http://www.anime-ultime.net/file-0-1/4631",
|
||||
date="2014-12-27 00:00:00",
|
||||
),
|
||||
]
|
|
@ -0,0 +1,106 @@
|
|||
from typing import List
|
||||
|
||||
from pynyaata2.bridge.nyaa import EraiRaws, Nyaa
|
||||
from pynyaata2.types import Color, RemoteFile
|
||||
|
||||
from pytest import mark
|
||||
import requests
|
||||
from requests_mock import Mocker
|
||||
|
||||
|
||||
def normalize(remotes: List[RemoteFile]):
|
||||
for i in range(len(remotes)):
|
||||
remotes[i].seeds = 1
|
||||
remotes[i].leechs = 10
|
||||
remotes[i].downloads = 100
|
||||
|
||||
return remotes
|
||||
|
||||
|
||||
def test_search_url():
|
||||
assert (
|
||||
Nyaa().search_url()
|
||||
== "https://nyaa.si?f=0&c=1_3&q=%28+vf%29%7C%28+vostfr%29%7C%28+multi%29%7C%28+fre%29&s=id&o=desc&p=1"
|
||||
)
|
||||
|
||||
assert (
|
||||
Nyaa().search_url("", 2)
|
||||
== "https://nyaa.si?f=0&c=1_3&q=%28+vf%29%7C%28+vostfr%29%7C%28+multi%29%7C%28+fre%29&s=id&o=desc&p=2"
|
||||
)
|
||||
|
||||
assert (
|
||||
Nyaa().search_url("test")
|
||||
== "https://nyaa.si?f=0&c=1_3&q=%28test+vf%29%7C%28test+vostfr%29%7C%28test+multi%29%7C%28test+fre%29&s=size&o=desc&p=1"
|
||||
)
|
||||
|
||||
assert (
|
||||
Nyaa().search_url("test", 2)
|
||||
== "https://nyaa.si?f=0&c=1_3&q=%28test+vf%29%7C%28test+vostfr%29%7C%28test+multi%29%7C%28test+fre%29&s=size&o=desc&p=2"
|
||||
)
|
||||
|
||||
assert (
|
||||
EraiRaws().search_url()
|
||||
== "https://nyaa.si/user/Erai-raws?f=0&c=1_2&q=+fre&s=id&o=desc&p=1"
|
||||
)
|
||||
|
||||
assert (
|
||||
EraiRaws().search_url("", 2)
|
||||
== "https://nyaa.si/user/Erai-raws?f=0&c=1_2&q=+fre&s=id&o=desc&p=2"
|
||||
)
|
||||
|
||||
assert (
|
||||
EraiRaws().search_url("test")
|
||||
== "https://nyaa.si/user/Erai-raws?f=0&c=1_2&q=test+fre&s=size&o=desc&p=1"
|
||||
)
|
||||
|
||||
assert (
|
||||
EraiRaws().search_url("test", 2)
|
||||
== "https://nyaa.si/user/Erai-raws?f=0&c=1_2&q=test+fre&s=size&o=desc&p=2"
|
||||
)
|
||||
|
||||
|
||||
@mark.asyncio
async def test_search(requests_mock: Mocker):
    """End-to-end parse test: a real nyaa.si user page fed through the mock
    must yield exactly two fully-populated RemoteFile results.

    NOTE(review): this test fetches https://nyaa.si/user/Chaussette33 over
    the live network to obtain the HTML served to the mock, so it is fragile
    offline or if that account's listing ever changes — consider a recorded
    fixture. TODO confirm this dependency is intentional.
    """
    # Let unmatched requests (the fixture download itself) hit the network.
    requests_mock.real_http = True
    # Serve the captured user-page HTML for the bridge's own search URL.
    requests_mock.get(
        Nyaa().search_url(), text=requests.get("https://nyaa.si/user/Chaussette33").text
    )

    # normalize() pins seeds/leechs/downloads so the comparison is stable.
    assert normalize(await Nyaa().search()) == [
        RemoteFile(
            bridge="Nyaa",
            id=1080919,
            category="Anime - Non-English-translated",
            color=Color.DEFAULT,
            name="Heroic Age intégrale VOSTFR",
            link="https://nyaa.si/view/1080919",
            comment=4,
            comment_url="https://nyaa.si/view/1080919#comments",
            magnet="magnet:?xt=urn:btih:f610a3cd6360a36c789d1166e5efc12e3a3bb3ca&dn=Heroic%20Age%20int%C3%A9grale%20VOSTFR&tr=http%3A%2F%2Fnyaa.tracker.wf%3A7777%2Fannounce&tr=udp%3A%2F%2Fopen.stealth.si%3A80%2Fannounce&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337%2Fannounce&tr=udp%3A%2F%2Fexodus.desync.com%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.torrent.eu.org%3A451%2Fannounce",
            torrent="https://nyaa.si/download/1080919.torrent",
            size=4509715660,
            date="2018-10-04 16:26:30",
            # Values forced by normalize(), not real scrape counters.
            seeds=1,
            leechs=10,
            downloads=100,
            nb_pages=1,
        ),
        RemoteFile(
            bridge="Nyaa",
            id=1080916,
            category="Anime - Non-English-translated",
            color=Color.DEFAULT,
            name="Nisekoi VOSTFR S1 + S2 1080p",
            link="https://nyaa.si/view/1080916",
            comment=3,
            comment_url="https://nyaa.si/view/1080916#comments",
            magnet="magnet:?xt=urn:btih:11c4b4d513260bf293975f1d24d8752ac5073fb1&dn=Nisekoi%20VOSTFR%20S1%20%2B%20S2%201080p&tr=http%3A%2F%2Fnyaa.tracker.wf%3A7777%2Fannounce&tr=udp%3A%2F%2Fopen.stealth.si%3A80%2Fannounce&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337%2Fannounce&tr=udp%3A%2F%2Fexodus.desync.com%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.torrent.eu.org%3A451%2Fannounce",
            torrent="https://nyaa.si/download/1080916.torrent",
            size=9878424780,
            date="2018-10-04 16:17:56",
            # Values forced by normalize(), not real scrape counters.
            seeds=1,
            leechs=10,
            downloads=100,
            nb_pages=1,
        ),
    ]
|
|
@ -0,0 +1,14 @@
|
|||
from pydantic_factories import ModelFactory
|
||||
|
||||
from pynyaata2.cache import client
|
||||
from pynyaata2.types import RemoteFile
|
||||
|
||||
|
||||
class RemoteFileFactory(ModelFactory[RemoteFile]):
    """Factory building randomized RemoteFile instances for the cache tests."""

    __model__ = RemoteFile
|
||||
|
||||
|
||||
def test_cache_data():
    """A RemoteFile written to the cache client must round-trip unchanged."""
    stored = RemoteFileFactory.build()

    client.set("test", stored)
    fetched = client.get("test")

    assert fetched == stored
|
|
@ -0,0 +1,56 @@
|
|||
from pydantic_factories import ModelFactory
|
||||
|
||||
from pynyaata2.filters import blacklist, danger, duplicate, inactive, trusted
|
||||
from pynyaata2.types import Color, RemoteFile
|
||||
|
||||
from pytest import MonkeyPatch
|
||||
|
||||
|
||||
class RemoteFileFactory(ModelFactory[RemoteFile]):
    """Factory building randomized RemoteFile instances for the filter tests."""

    __model__ = RemoteFile
    # Pin the color so every generated file starts from the neutral baseline;
    # the danger/trusted tests then flip it explicitly where needed.
    color = Color.DEFAULT
|
||||
|
||||
|
||||
def test_blacklist(monkeypatch: MonkeyPatch):
    """blacklist() must drop every file whose name contains a blacklisted word."""
    monkeypatch.setenv("BLACKLIST_WORDS", "one,two")

    remotes = RemoteFileFactory.batch(10)
    for index, tainted_name in ((0, "oui one non"), (1, "non two oui")):
        remotes[index].name = tainted_name

    kept = blacklist(remotes)

    # Two of the ten names matched the blacklist, so eight survive.
    assert len(kept) == 8
|
||||
|
||||
|
||||
def test_danger():
    """danger() must filter out files flagged with the DANGER color."""
    remotes = RemoteFileFactory.batch(10)
    remotes[0].color = Color.DANGER

    kept = danger(remotes)

    # Exactly one file was flagged, so nine remain.
    assert len(kept) == 9
|
||||
|
||||
|
||||
def test_duplicate():
    """duplicate() must keep only one entry per id."""
    remotes = RemoteFileFactory.batch(10)
    # Give the first two files the same id to create one duplicate pair.
    remotes[0].id = remotes[1].id = 1

    deduplicated = duplicate(remotes)

    assert len(deduplicated) == 9
|
||||
|
||||
|
||||
def test_inactive():
    """inactive() must drop files with neither seeders nor downloads."""
    remotes = RemoteFileFactory.batch(10)
    # Zero out both activity counters on one file to mark it dead.
    remotes[0].seeds = 0
    remotes[0].downloads = 0

    active = inactive(remotes)

    assert len(active) == 9
|
||||
|
||||
|
||||
def test_trusted(monkeypatch: MonkeyPatch):
    """trusted() must recolor files matching a trusted word to PRIMARY."""
    monkeypatch.setenv("TRUSTED_WORDS", "one,two")

    remotes = RemoteFileFactory.batch(10)
    remotes[0].name = "oui one non"
    remotes[1].name = "non two oui"

    recolored = trusted(remotes)

    # The two matching files are promoted; the rest keep the default color.
    expected_colors = (Color.PRIMARY, Color.PRIMARY, Color.DEFAULT)
    for remote, wanted in zip(recolored, expected_colors):
        assert remote.color == wanted
|