Cache is now managed by Redis

Michel Roux 2021-07-10 09:25:35 +02:00
parent a6c48cc803
commit bc65d05f46
8 changed files with 36 additions and 40 deletions


@@ -5,11 +5,12 @@ MYSQL_USER=nyaa
 MYSQL_PASSWORD=nyaa
 MYSQL_DATABASE=nyaa
 MYSQL_SERVER=db
+REDIS_SERVER=redis
 ADMIN_USERNAME=admin
 ADMIN_PASSWORD=secret
 REQUESTS_TIMEOUT=5
 CACHE_TIMEOUT=3600
 MYSQL_ROOT_PASSWORD=root
-BLACKLIST_WORDS=Chris44,Vol.,[zza],.ssa,Ref:rain
+BLACKLIST_WORDS=Chris44,Vol.,[zza],.ssa,.ass,Ref:rain
 CLOUDPROXY_ENDPOINT=http://flaresolverr:8191/v1
 CAPTCHA_SOLVER=hcaptcha-solver
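For reference, a minimal sketch of how a variable like the new REDIS_SERVER entry reaches the application through python-dotenv and os.environ; the fallback values below are illustrative assumptions, not taken from the commit:

from os import environ
from flask.cli import load_dotenv

load_dotenv()  # reads the .env file edited above

# If REDIS_SERVER is unset, the Redis-backed cache simply stays disabled.
redis_server = environ.get('REDIS_SERVER')
cache_timeout = int(environ.get('CACHE_TIMEOUT', 3600))  # 3600 mirrors the .env value; the default is an assumption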


@@ -26,6 +26,11 @@ services:
     volumes:
       - ./.db:/var/lib/mysql
+  redis:
+    image: redis
+    ports:
+      - "6379:6379"
   flaresolverr:
     image: flaresolverr/flaresolverr
     ports:
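To sanity-check the new compose service once the stack is up, a quick ping from redis-py should suffice. This is a hedged sketch, not part of the commit; the host name 'redis' matches the service name above and 6379 is the default Redis port:

from redis import Redis

# Connects to the "redis" service declared in docker-compose.
client = Redis(host='redis', port=6379)
print(client.ping())  # True when the server answers PONG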


@@ -6,6 +6,7 @@ from flask.cli import load_dotenv
 from flask_apscheduler import APScheduler
 from flask_httpauth import HTTPBasicAuth
 from flask_sqlalchemy import SQLAlchemy
+from redis import Redis
 load_dotenv()
@@ -18,6 +19,7 @@ REQUESTS_TIMEOUT = int(environ.get('REQUESTS_TIMEOUT', 5))
 BLACKLIST_WORDS = environ.get('BLACKLIST_WORDS', '').split(',') if environ.get('BLACKLIST_WORDS', '') else []
 CLOUDPROXY_ENDPOINT = environ.get('CLOUDPROXY_ENDPOINT')
 MYSQL_ENABLED = False
+REDIS_ENABLED = False
 app = Flask(__name__)
 app.name = 'PyNyaaTa'
@@ -46,3 +48,8 @@ if db_host:
         'pool_recycle': 200
     }
     db = SQLAlchemy(app)
+cache_host = environ.get('REDIS_SERVER')
+if cache_host:
+    REDIS_ENABLED = True
+    cache = Redis(cache_host)
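A note on the new config block: Redis(cache_host) targets the default port 6379, and values read back with get() are bytes, which is why the connector cache serialises payloads with json.dumps/json.loads. A rough round-trip sketch (the key name is purely illustrative):

from json import dumps, loads
from redis import Redis

cache = Redis('redis')  # equivalent to Redis(host='redis', port=6379)

# Write a JSON payload with a one-hour expiry, then read it back.
cache.set('pynyaata.example', dumps({'data': [], 'is_more': False}), 3600)
payload = loads(cache.get('pynyaata.example'))  # loads() accepts the bytes returned by get()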


@@ -58,7 +58,6 @@ class AnimeUltime(ConnectorCore):
                     'href': href,
                     'name': url.get_text(),
                     'type': tds[1].get_text(),
-                    'date': parse_date(None),
                     'class': self.color if link_exist_in_db(href) else ''
                 })
             elif len(player) > 0:
@@ -71,7 +70,6 @@
                     'href': '%s/file-0-1/%s' % (self.base_url, player[0]['data-serie']),
                     'name': name[0].get_text(),
                     'type': ani_type[0].get_text().replace(':', ''),
-                    'date': parse_date(None),
                     'class': self.color if link_exist_in_db(href) else ''
                 })


@@ -1,5 +1,4 @@
 from abc import ABC, abstractmethod
-from datetime import datetime
 from enum import Enum
 from functools import wraps
 from json import dumps, loads
@@ -8,7 +7,10 @@ from urllib.parse import urlencode
 import requests
 from requests import RequestException
-from ..config import CACHE_TIMEOUT, REQUESTS_TIMEOUT, CLOUDPROXY_ENDPOINT, logger
+from ..config import CACHE_TIMEOUT, REQUESTS_TIMEOUT, CLOUDPROXY_ENDPOINT, logger, REDIS_ENABLED
+if REDIS_ENABLED:
+    from ..config import cache
 cloudproxy_session = None
@@ -24,48 +26,30 @@ class ConnectorLang(Enum):
 class Cache:
-    CACHE_DATA = {}
     def cache_data(self, f):
         @wraps(f)
         def wrapper(*args, **kwds):
             connector = args[0]
-            timestamp = datetime.now().timestamp()
+            key = 'pynyaata.%s.%s.%s.%s' % (connector.__class__.__name__, f.__name__, connector.query, connector.page)
-            for connector_class in list(self.CACHE_DATA):
-                for connector_func in list(self.CACHE_DATA[connector_class]):
-                    for connector_query in list(self.CACHE_DATA[connector_class][connector_func]):
-                        for connector_page in list(self.CACHE_DATA[connector_class][connector_func][connector_query]):
-                            if self.CACHE_DATA[connector_class][connector_func][connector_query][connector_page][
-                                'timeout'
-                            ] < timestamp:
-                                del self.CACHE_DATA[connector_class][connector_func][connector_query][connector_page]
+            if REDIS_ENABLED:
+                json = cache.get(key)
-            if connector.__class__.__name__ not in self.CACHE_DATA:
-                self.CACHE_DATA[connector.__class__.__name__] = {}
-            if f.__name__ not in self.CACHE_DATA[connector.__class__.__name__]:
-                self.CACHE_DATA[connector.__class__.__name__][f.__name__] = {}
-            if connector.query not in self.CACHE_DATA[connector.__class__.__name__][f.__name__]:
-                self.CACHE_DATA[connector.__class__.__name__][f.__name__][connector.query] = {}
-            if connector.page not in self.CACHE_DATA[connector.__class__.__name__][f.__name__][connector.query]:
-                self.CACHE_DATA[connector.__class__.__name__][f.__name__][connector.query][connector.page] = {
-                    'timeout': 0.0
-                }
-            cached_data = self.CACHE_DATA[connector.__class__.__name__][f.__name__][connector.query][connector.page]
-            if cached_data['timeout'] > timestamp:
-                connector.data = cached_data['data']
-                connector.is_more = cached_data['is_more']
+                if json:
+                    data = loads(json)
+                    connector.data = data['data']
+                    connector.is_more = data['is_more']
                     connector.on_error = False
                     return
             ret = f(*args, **kwds)
-            if not connector.on_error:
-                self.CACHE_DATA[connector.__class__.__name__][f.__name__][connector.query][connector.page] = {
+            if not connector.on_error and REDIS_ENABLED:
+                cache.set(key, dumps({
                     'data': connector.data,
-                    'timeout': timestamp + CACHE_TIMEOUT,
                     'is_more': connector.is_more
-                }
+                }), CACHE_TIMEOUT)
             return ret
         return wrapper
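The rewritten decorator drops the nested CACHE_DATA dictionaries and the manual timestamp sweep: each entry now lives under a flat 'pynyaata.<connector>.<method>.<query>.<page>' key, and Redis evicts it on its own via the expiry passed as the third argument to set(). A small sketch for inspecting what ends up in the cache, not part of the commit:

from redis import Redis

cache = Redis('redis')

# List cached connector results and their remaining lifetime.
for key in cache.scan_iter('pynyaata.*'):
    print(key, cache.ttl(key))  # seconds until Redis evicts this entry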


@@ -65,7 +65,7 @@ class Nyaa(ConnectorCore):
                 'comment': str(urls[0]).replace('/view/', self.base_url + '/view/') if has_comment else '',
                 'link': tds[2].decode_contents().replace('/download/', self.base_url + '/download/'),
                 'size': tds[3].get_text(),
-                'date': parse_date(tds[4].get_text(), '%Y-%m-%d %H:%M'),
+                'date': tds[4].get_text(),
                 'seeds': check_seeds,
                 'leechs': tds[6].get_text(),
                 'downloads': check_downloads,


@@ -21,9 +21,9 @@ def parse_date(str_to_parse, date_format=''):
     else:
         date = parse(str_to_parse, date_formats=[date_format])
     if date:
-        return date
+        return date.isoformat(' ', 'minutes')
     else:
-        return datetime.fromtimestamp(0)
+        return datetime.fromtimestamp(0).isoformat(' ', 'minutes')
 def boldify(str_to_replace, keyword):
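parse_date now returns ISO-formatted strings, and the Nyaa connector keeps the raw date text, presumably because cached payloads now pass through json.dumps, which rejects datetime objects. A short illustration of the difference, not part of the commit:

from datetime import datetime
from json import dumps

try:
    dumps({'date': datetime.fromtimestamp(0)})
except TypeError:
    pass  # datetime instances are not JSON serialisable

# An ISO string survives the round trip into Redis unchanged.
print(dumps({'date': datetime.fromtimestamp(0).isoformat(' ', 'minutes')}))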


@@ -9,3 +9,4 @@ requests==2.25.1
 beautifulsoup4==4.9.3
 python-dotenv==0.18.0
 dateparser==1.0.0
+redis==3.5.3