Add log_async

Michel Roux 2023-01-04 22:32:27 +01:00
parent 8ceebb80ae
commit 39e9d123b5
6 changed files with 28 additions and 13 deletions

View File

@@ -2,12 +2,12 @@ from datetime import datetime, timedelta
 from typing import List
 from bs4 import BeautifulSoup
-import dateparser
+from dateparser import parse
 from pydantic import HttpUrl, parse_obj_as
 from pynyaata.cache import cache_data
 from pynyaata.filters import filter_data
-from pynyaata.types import Bridge, Color, RemoteFile
+from pynyaata.types import Bridge, Color, RemoteFile, log_async
 import requests
@@ -33,6 +33,7 @@ class AnimeUltime(Bridge):
 ),
 )
+@log_async
 @cache_data
 @filter_data
 async def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:
@@ -87,9 +88,7 @@ class AnimeUltime(Bridge):
 category=tds[4].get_text(),
 name=tds[0].get_text(),
 link=f"{self.base_url}{tds[0].a['href']}",
-date=dateparser.parse(
-    h3s[i].get_text()[:-3], ["%A %d %B %Y"]
-),
+date=parse(h3s[i].get_text()[:-3], ["%A %d %B %Y"]),
 )
 )
 elif player and title and history and tables:
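The AnimeUltime hunk above replaces `dateparser.parse(...)` with a direct `parse(...)` call, keeping the explicit format list. A minimal sketch of that call in isolation, with a made-up French date string rather than anything scraped from the site:

```python
from dateparser import parse

# The second argument is a list of explicit date formats to try;
# dateparser still detects the language of the input string itself.
when = parse("mercredi 4 janvier 2023", ["%A %d %B %Y"])
print(when)  # should print 2023-01-04 00:00:00 if the French date is recognised
```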

View File

@@ -8,7 +8,7 @@ from pydantic import HttpUrl, parse_obj_as
 from pynyaata.cache import cache_data
 from pynyaata.filters import filter_data
-from pynyaata.types import Bridge, Color, RemoteFile
+from pynyaata.types import Bridge, Color, RemoteFile, log_async
 import requests
@@ -37,6 +37,7 @@ class Nyaa(Bridge):
 return parse_obj_as(HttpUrl, f"{self.base_url}?{params}")
+@log_async
 @cache_data
 @filter_data
 async def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:

View File

@@ -7,7 +7,7 @@ from pydantic import HttpUrl, parse_obj_as
 from pynyaata.cache import cache_data
 from pynyaata.filters import filter_data
 from pynyaata.session import FlareRequests
-from pynyaata.types import Bridge, Color, RemoteFile
+from pynyaata.types import Bridge, Color, RemoteFile, log_async
 from requests import HTTPError
@@ -32,6 +32,7 @@ class YggTorrent(Bridge):
 return parse_obj_as(HttpUrl, f"{self.base_url}?{params}")
+@log_async
 @cache_data
 @filter_data
 async def search(self, query: str = "", page: int = 1) -> List[RemoteFile]:

View File

@@ -1,5 +1,5 @@
-import logging
 from functools import wraps
+from logging import error
 from os import getenv
 from typing import Optional
@@ -19,12 +19,12 @@ if REDIS_URL:
 client = RedisCache()
 except RedisError as e:
-logging.error(e)
+error(e)
 def cache_data(f):
 @wraps(f)
-async def wrapper(*args, **kwds):
+async def wrapper(*args, **kwargs):
 bridge = args[0]
 query = args[1]
 page = args[2]
@@ -34,7 +34,7 @@ def cache_data(f):
 if ret:
 return ret
-ret = await f(*args, **kwds)
+ret = await f(*args, **kwargs)
 client.set(key, ret)
 return ret

View File

@@ -50,8 +50,8 @@ def danger(remotes: List[RemoteFile]) -> List[RemoteFile]:
 def filter_data(f):
 @wraps(f)
-async def wrapper(*args, **kwds):
-ret = await f(*args, **kwds)
+async def wrapper(*args, **kwargs):
+ret = await f(*args, **kwargs)
 ret = duplicate(ret)
 ret = inactive(ret)

View File

@@ -1,6 +1,8 @@
 from abc import ABC, abstractmethod
 from datetime import datetime
 from enum import Enum
+from functools import wraps
+from logging import error
 from typing import List, Optional
 from pydantic import BaseModel, ByteSize, HttpUrl
@@ -62,3 +64,15 @@ class Cache(ABC):
 @abstractmethod
 def set(self, key: str, data: List[RemoteFile]):
 pass
+def log_async(f):
+    @wraps(f)
+    async def wrapper(*args, **kwargs):
+        try:
+            return await f(*args, **kwargs)
+        except Exception as e:
+            error(e)
+            raise e
+    return wrapper
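To show what the new decorator does in isolation, here is a self-contained sketch; `log_async` is copied from the hunk above, while the `boom` coroutine and the logging setup are made-up scaffolding, not part of the commit:

```python
import asyncio
from functools import wraps
from logging import basicConfig, error


def log_async(f):
    @wraps(f)
    async def wrapper(*args, **kwargs):
        try:
            return await f(*args, **kwargs)
        except Exception as e:
            error(e)  # log the exception...
            raise e   # ...then re-raise it for the caller
    return wrapper


@log_async
async def boom():  # hypothetical failing search, not from the commit
    raise RuntimeError("search failed")


basicConfig()
try:
    asyncio.run(boom())
except RuntimeError:
    pass  # the error was logged by log_async and still propagated
```

Since `@log_async` is stacked outermost on each bridge's `search`, it also logs exceptions raised inside the `cache_data` and `filter_data` wrappers before they reach the caller.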