Cleaning mess
parent 0adca89e76
commit fc019007fa
app.py (3 changed lines)
@@ -1,4 +1,3 @@
-from logging import getLogger
 from operator import attrgetter, itemgetter
 from time import sleep
 
@@ -134,7 +133,7 @@ def clean_model(obj):
         if not attr.startswith('_') and getattr(obj, attr) is None:
             try:
                 setattr(obj, attr, '')
-            except Exception:
+            except AttributeError:
                 pass
     return obj
 
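Note on the except change above: setattr() raises AttributeError when the target attribute is read-only (for example a property with no setter), which is presumably the one failure clean_model wants to ignore; the old bare Exception also swallowed unrelated errors. A minimal sketch of the behaviour, using a hypothetical Example class that is not part of this repository:

    class Example:
        @property
        def slug(self):  # read-only: no setter defined
            return None

    obj = Example()
    try:
        setattr(obj, 'slug', '')  # raises AttributeError
    except AttributeError:
        pass  # only this specific failure is ignored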
connectors.py

@@ -9,6 +9,7 @@ from urllib.parse import quote
 
 from bs4 import BeautifulSoup
 from cloudscraper import create_scraper
+from requests import RequestException
 
 from config import IS_DEBUG, CACHE_TIMEOUT, BLACKLIST_WORDS
 from models import AnimeLink
@@ -78,6 +79,29 @@ class Cache:
 ConnectorCache = Cache()
 
 
+def curl_content(url, params=None, ajax=False):
+    if ajax:
+        headers = {'X-Requested-With': 'XMLHttpRequest'}
+    else:
+        headers = {}
+
+    try:
+        if params is not None:
+            response = scraper.post(url, params, timeout=5, headers=headers)
+        else:
+            response = scraper.get(url, timeout=5, headers=headers)
+
+        output = response.text
+        http_code = response.status_code
+    except RequestException as e:
+        output = ''
+        http_code = 500
+        if IS_DEBUG:
+            getLogger().exception(e)
+
+    return {'http_code': http_code, 'output': output}
+
+
 class Connector(ABC):
     @property
     @abstractmethod
@@ -132,28 +156,6 @@ class Connector(ABC):
         self.get_history()
         return self
 
-    def curl_content(self, url, params=None, ajax=False):
-        if ajax:
-            headers = {'X-Requested-With': 'XMLHttpRequest'}
-        else:
-            headers = {}
-
-        try:
-            if params is not None:
-                response = scraper.post(url, params, timeout=5, headers=headers)
-            else:
-                response = scraper.get(url, timeout=5, headers=headers)
-
-            output = response.text
-            http_code = response.status_code
-        except Exception as e:
-            output = ''
-            http_code = 500
-            if IS_DEBUG:
-                getLogger().exception(e)
-
-        return {'http_code': http_code, 'output': output}
-
     @staticmethod
     def get_instance(url, query):
         if 'nyaa.si' in url:
@@ -202,7 +204,7 @@ class Nyaa(Connector):
 
     @ConnectorCache.cache_data
     def search(self):
-        response = self.curl_content(self.get_full_search_url())
+        response = curl_content(self.get_full_search_url())
 
         if response['http_code'] is 200:
             html = BeautifulSoup(response['output'], 'html.parser')
@@ -274,7 +276,7 @@ class Pantsu(Connector):
 
     @ConnectorCache.cache_data
     def search(self):
-        response = self.curl_content(self.get_full_search_url())
+        response = curl_content(self.get_full_search_url())
 
         if response['http_code'] is 200:
             html = BeautifulSoup(response['output'], 'html.parser')
@@ -320,9 +322,8 @@ class Pantsu(Connector):
             'href': href,
             'name': url_safe,
             'comment': '',
-            'link': tds[2].decode_contents()
-            .replace('icon-magnet', 'fa fa-fw fa-magnet')
-            .replace('icon-floppy', 'fa fa-fw fa-download'),
+            'link': tds[2].decode_contents().replace('icon-magnet', 'fa fa-fw fa-magnet').replace(
+                'icon-floppy', 'fa fa-fw fa-download'),
             'size': tds[3].string,
             'date': formatted_date,
             'seeds': check_seeds,
@@ -360,7 +361,7 @@ class YggTorrent(Connector):
     @ConnectorCache.cache_data
     def search(self):
         if self.category:
-            response = self.curl_content(self.get_full_search_url())
+            response = curl_content(self.get_full_search_url())
 
             if response['http_code'] is 200:
                 html = BeautifulSoup(response['output'], 'html.parser')
@@ -434,7 +435,7 @@ class AnimeUltime(Connector):
 
     @ConnectorCache.cache_data
     def search(self):
-        response = self.curl_content(self.get_full_search_url(), {'search': self.query})
+        response = curl_content(self.get_full_search_url(), {'search': self.query})
 
         if response['http_code'] is 200:
             html = BeautifulSoup(response['output'], 'html.parser')
@@ -482,7 +483,7 @@ class AnimeUltime(Connector):
 
     @ConnectorCache.cache_data
     def get_history(self):
-        response = self.curl_content(self.get_full_search_url())
+        response = curl_content(self.get_full_search_url())
 
         if response['http_code'] is 200:
             html = BeautifulSoup(response['output'], 'html.parser')
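With curl_content promoted to a module-level function (and its handler narrowed from a bare Exception to requests' RequestException), callers no longer need a Connector instance. A minimal usage sketch, assuming the module-level cloudscraper instance and IS_DEBUG flag that the diff references already exist in connectors.py; the URL is only an example:

    from connectors import curl_content

    result = curl_content('https://nyaa.si/', ajax=False)
    if result['http_code'] == 200:
        print(result['output'][:100])  # first 100 characters of the body
    else:
        print('request failed with HTTP %d' % result['http_code'])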
@@ -1,13 +1,12 @@
 from config import app
-from connectors import Connector
+from connectors import curl_content
 from models import AnimeLink
 
 app.config['SQLALCHEMY_ECHO'] = False
 links = AnimeLink.query.all()
 
 for link in links:
-    connect = Connector.get_instance(link.link, link.title.keyword)
-    html = connect.curl_content(link.link)
+    html = curl_content(link.link)
 
     if html['http_code'] is not 200:
         print('(%d) %s %s : %s' % (html['http_code'], link.title.name, link.season, link.link))
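One caveat the commit leaves untouched: comparisons such as response['http_code'] is 200 and html['http_code'] is not 200 test object identity, not equality. They only happen to work because CPython interns small integers (200 included), which is an implementation detail, and Python 3.8+ emits a SyntaxWarning for "is" with a literal. A sketch of the safer form, as a hypothetical follow-up change:

    if html['http_code'] != 200:  # equality holds on any Python implementation
        ...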
requirements.txt

@@ -2,7 +2,8 @@ Flask==1.0.2
 Flask-SQLAlchemy==2.1
 SQLAlchemy==1.2.18
 Flask-HTTPAuth==3.2.4
-Flask-WTF===0.14.2
+Flask-WTF==0.14.2
+WTForms==2.2.1
 PyMySQL==0.9.3
 requests==2.21.0
 beautifulsoup4==4.7.1
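The Flask-WTF fix is subtle: === is pip's arbitrary-equality operator from PEP 440, which compares the version as an opaque string, while == is the ordinary version-matching clause used everywhere else in this file. An illustrative (hypothetical) fragment showing the difference:

    Flask-WTF===0.14.2  # arbitrary equality: exact string match only
    Flask-WTF==0.14.2   # version matching: the normal way to pin
    Flask-WTF==0.14.*   # version matching also accepts wildcards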