From 9b6cae5dc2de6f4580e3b51c01d698f0870e5ee3 Mon Sep 17 00:00:00 2001
From: Michel Roux
Date: Thu, 9 Apr 2020 11:31:35 +0200
Subject: [PATCH] Fix Python complaining about the is keyword with literals

---
 connectors.py | 10 +++++-----
 get404.py     |  2 +-
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/connectors.py b/connectors.py
index 7bcce30..54be678 100644
--- a/connectors.py
+++ b/connectors.py
@@ -206,7 +206,7 @@ class Nyaa(Connector):
     def search(self):
         response = curl_content(self.get_full_search_url())
 
-        if response['http_code'] is 200:
+        if response['http_code'] == 200:
             html = BeautifulSoup(response['output'], 'html.parser')
             trs = html.select('table.torrent-list tr')
             valid_trs = 0
@@ -280,7 +280,7 @@ class Pantsu(Connector):
     def search(self):
         response = curl_content(self.get_full_search_url())
 
-        if response['http_code'] is 200:
+        if response['http_code'] == 200:
             html = BeautifulSoup(response['output'], 'html.parser')
             trs = html.select('div.results tr')
             valid_trs = 0
@@ -365,7 +365,7 @@ class YggTorrent(Connector):
         if self.category:
             response = curl_content(self.get_full_search_url())
 
-            if response['http_code'] is 200:
+            if response['http_code'] == 200:
                 html = BeautifulSoup(response['output'], 'html.parser')
                 trs = html.select('table.table tr')
                 valid_trs = 0
@@ -440,7 +440,7 @@ class AnimeUltime(Connector):
     def search(self):
         response = curl_content(self.get_full_search_url(), {'search': self.query})
 
-        if response['http_code'] is 200:
+        if response['http_code'] == 200:
             html = BeautifulSoup(response['output'], 'html.parser')
 
             title = html.select('div.title')
@@ -488,7 +488,7 @@ class AnimeUltime(Connector):
     def get_history(self):
         response = curl_content(self.get_full_search_url())
 
-        if response['http_code'] is 200:
+        if response['http_code'] == 200:
             html = BeautifulSoup(response['output'], 'html.parser')
             tables = html.select('table.jtable')
             h3s = html.findAll('h3')
diff --git a/get404.py b/get404.py
index ac09f96..56d23e5 100644
--- a/get404.py
+++ b/get404.py
@@ -8,7 +8,7 @@ links = AnimeLink.query.all()
 
 for link in links:
     html = curl_content(link.link)
-    if html['http_code'] is not 200:
+    if html['http_code'] != 200:
         print('(%d) %s %s : %s' % (html['http_code'], link.title.name, link.season, link.link))
     elif 'darkgray' in str(html['output']):
         print('(darkgray) %s %s : %s' % (link.title.name, link.season, link.link))
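
For context, here is a small standalone snippet (illustrative only; the variable names and values are hypothetical and not taken from connectors.py or get404.py) showing why the patch replaces the is keyword with ==. The is operator compares object identity while == compares values; "is 200" only appears to work because CPython caches small integers, and Python 3.8+ warns about the construct with 'SyntaxWarning: "is" with a literal'.

    # Illustration only: identity vs. equality with literals.
    status = int("1000")    # int built at runtime, outside CPython's small-int cache (-5..256)
    print(status == 1000)   # True  - value comparison, always the right check
    print(status is 1000)   # False - different objects; also triggers SyntaxWarning on Python 3.8+

    code = 200.0            # a numerically equal float
    print(code == 200)      # True
    print(code is 200)      # False - a float is never the same object as the int literal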