From e11deb67d95f614f6ed0b2a9f63622e4c0481b8c Mon Sep 17 00:00:00 2001
From: miigotu
Date: Mon, 11 Jan 2016 16:27:30 -0800
Subject: [PATCH] Replace all instances of searchURL with search_url in
 providers

Rework cpasbien, but CF (Cloudflare) is blocking us with its JS challenge
page; disabled in code until that can be resolved
---
 sickbeard/providers/__init__.py       |  4 +-
 sickbeard/providers/cpasbien.py       | 59 ++++++++++-------------
 sickbeard/providers/elitetorrent.py   |  6 +--
 sickbeard/providers/freshontv.py      | 12 +++---
 sickbeard/providers/gftracker.py      |  6 +--
 sickbeard/providers/hdspace.py        |  8 ++--
 sickbeard/providers/hdtorrents.py     | 10 ++---
 sickbeard/providers/iptorrents.py     |  8 ++--
 sickbeard/providers/kat.py            |  8 ++--
 sickbeard/providers/newpct.py         |  6 +--
 sickbeard/providers/nyaatorrents.py   |  6 +--
 sickbeard/providers/pretome.py        |  6 +--
 sickbeard/providers/scc.py            |  6 +--
 sickbeard/providers/scenetime.py      |  6 +--
 sickbeard/providers/strike.py         |  6 +--
 sickbeard/providers/t411.py           |  8 ++--
 sickbeard/providers/titansoftv.py     |  6 +--
 sickbeard/providers/tntvillage.py     |  8 ++--
 sickbeard/providers/torrentbytes.py   |  6 +--
 sickbeard/providers/torrentproject.py | 10 ++---
 sickbeard/providers/xthor.py          |  6 +--
 21 files changed, 93 insertions(+), 108 deletions(-)

diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 483f379736..a32b9ef8c9 100644 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -23,7 +23,7 @@ import sickbeard from sickbeard.providers import btn, newznab, rsstorrent, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \ - omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, cpasbien, fnt, xthor, torrentbytes, \ + omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, fnt, xthor, torrentbytes, \ freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \ scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents @@ -32,7 +32,7 @@ 'torrentday', 'hdbits', 'hounddawgs', 'iptorrents', 'omgwtfnzbs', 'speedcd', 'nyaatorrents', 'torrentbytes', 'freshontv', 'morethantv', 'bitsoup', 't411', 'tokyotoshokan', 'alpharatio', - 'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers', 'cpasbien', + 'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers', 'fnt', 'xthor', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker', 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents' diff --git a/sickbeard/providers/cpasbien.py b/sickbeard/providers/cpasbien.py index 9182ccdb6f..b4f509d356 100644 --- a/sickbeard/providers/cpasbien.py +++ b/sickbeard/providers/cpasbien.py @@ -16,8 +16,7 @@ # # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
- -import traceback +import re from sickbeard import logger from sickbeard import tvcache @@ -39,10 +38,9 @@ def __init__(self): self.url = "http://www.cpasbien.io" self.proper_strings = ['PROPER', 'REPACK'] - self.cache = CpasbienCache(self) - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-statements, too-many-branches + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals results = [] for mode in search_strings: items = [] @@ -51,57 +49,44 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html' + search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d' else: - searchURL = self.url + '/view_cat.php?categorie=series&trie=date-d' + search_url = self.url + '/view_cat.php?categorie=series&trie=date-d' - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.get_url(searchURL) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + data = self.get_url(search_url) if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - line = 0 - torrents = [] - while True: - resultlin = html.findAll(class_='ligne%i' % line) - if not resultlin: - break - - torrents += resultlin - line += 1 - - for torrent in torrents: - try: - title = torrent.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien") - tmp = torrent.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip() - download_url = (self.url + '/telechargement/%s' % tmp) - seeders = try_int(torrent.find(class_="up").get_text(strip=True)) - leechers = try_int(torrent.find(class_="down").get_text(strip=True)) - torrent_size = torrent.find(class_="poid").get_text() - - size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError, KeyError, IndexError): - continue - + with BS4Parser(data, 'html5lib') as html: + torrent_rows = html.find_all(class_=re.compile('ligne[01]')) + for result in torrent_rows: + try: + title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien") + tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip() + download_url = (self.url + '/telechargement/%s' % tmp) if not all([title, download_url]): continue - # Filter unseeded torrent + seeders = try_int(result.find(class_="up").get_text(strip=True)) + leechers = try_int(result.find(class_="down").get_text(strip=True)) if seeders < self.minseed or leechers < self.minleech: if mode != 'RSS': logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG) continue + torrent_size = result.find(class_="poid").get_text(strip=True) + + units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] + size = convert_size(torrent_size, units=units) or -1 + item = title, download_url, size, seeders, leechers if mode != 'RSS': logger.log(u"Found result: %s " % title, logger.DEBUG) items.append(item) - - except Exception: - logger.log(u"Failed parsing provider. 
Traceback: %s" % traceback.format_exc(), logger.ERROR) + except StandardError: + continue # For each search mode sort all the items by seeders if available items.sort(key=lambda tup: tup[3], reverse=True) diff --git a/sickbeard/providers/elitetorrent.py b/sickbeard/providers/elitetorrent.py index d3b9f2e522..a267c8dc5d 100644 --- a/sickbeard/providers/elitetorrent.py +++ b/sickbeard/providers/elitetorrent.py @@ -87,10 +87,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string) self.search_params['buscar'] = search_string.strip() if mode != 'RSS' else '' - searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.get_url(searchURL, timeout=30) + data = self.get_url(search_url, timeout=30) if not data: continue diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 30edf29736..3efefdf0f1 100644 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -116,9 +116,9 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many- if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['search'] % (freeleech, search_string) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - init_html = self.get_url(searchURL) + search_url = self.urls['search'] % (freeleech, search_string) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + init_html = self.get_url(search_url) max_page_number = 0 if not init_html: @@ -160,9 +160,9 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many- for i in range(1, max_page_number): time.sleep(1) - page_searchURL = searchURL + '&page=' + str(i) - # '.log(u"Search string: " + page_searchURL, logger.DEBUG) - page_html = self.get_url(page_searchURL) + page_search_url = search_url + '&page=' + str(i) + # '.log(u"Search string: " + page_search_url, logger.DEBUG) + page_html = self.get_url(page_search_url) if not page_html: continue diff --git a/sickbeard/providers/gftracker.py b/sickbeard/providers/gftracker.py index 72c96af92b..6b381325eb 100644 --- a/sickbeard/providers/gftracker.py +++ b/sickbeard/providers/gftracker.py @@ -97,11 +97,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['search'] % (self.categories, search_string) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.urls['search'] % (self.categories, search_string) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) # Returns top 30 results by default, expandable in user profile - data = self.get_url(searchURL) + data = self.get_url(search_url) if not data: continue diff --git a/sickbeard/providers/hdspace.py b/sickbeard/providers/hdspace.py index 2fdf868bad..a5a543e632 100644 --- a/sickbeard/providers/hdspace.py +++ b/sickbeard/providers/hdspace.py @@ -92,15 +92,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many logger.log(u"Search Mode: %s" % mode, logger.DEBUG) for search_string in search_strings[mode]: if mode != 'RSS': - searchURL = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' 
')),) + search_url = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),) else: - searchURL = self.urls['search'] % '' + search_url = self.urls['search'] % '' - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) if mode != 'RSS': logger.log(u"Search string: %s" % search_string, logger.DEBUG) - data = self.get_url(searchURL) + data = self.get_url(search_url) if not data or 'please try later' in data: logger.log(u"No data returned from provider", logger.DEBUG) continue diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 105489c6c6..8754a1b515 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -92,17 +92,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man for search_string in search_strings[mode]: if mode != 'RSS': - searchURL = self.urls['search'] % (urllib.quote_plus(search_string), self.categories) + search_url = self.urls['search'] % (urllib.quote_plus(search_string), self.categories) logger.log(u"Search string: %s" % search_string, logger.DEBUG) else: - searchURL = self.urls['rss'] % self.categories + search_url = self.urls['rss'] % self.categories if self.freeleech: - searchURL = searchURL.replace('active=1', 'active=5') + search_url = search_url.replace('active=1', 'active=5') - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.get_url(searchURL) + data = self.get_url(search_url) if not data or 'please try later' in data: logger.log(u"No data returned from provider", logger.DEBUG) continue diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index f1055e33ca..3e36754a83 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -95,11 +95,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many- logger.log(u"Search string: %s " % search_string, logger.DEBUG) # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile - searchURL = self.urls['search'] % (self.categories, freeleech, search_string) - searchURL += ';o=seeders' if mode != 'RSS' else '' - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.urls['search'] % (self.categories, freeleech, search_string) + search_url += ';o=seeders' if mode != 'RSS' else '' + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.get_url(searchURL) + data = self.get_url(search_url) if not data: continue diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index 27105064f9..8a6a71092d 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -82,12 +82,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many url_fmt_string = 'usearch' if mode != 'RSS' else search_string try: - searchURL = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params) + search_url = self.urls['search'] % url_fmt_string + '?' 
+ urlencode(self.search_params) if self.custom_url: - searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath + search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/')) # Must use posixpath - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.get_url(searchURL) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + data = self.get_url(search_url) if not data: logger.log(u'URL did not return data, maybe try a custom url, or a different one', logger.DEBUG) continue diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index fb5da04035..3a9153b5d4 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -92,10 +92,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man self.search_params['q'] = search_string.strip() if mode != 'RSS' else '' self.search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy' - searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.get_url(searchURL, timeout=30) + data = self.get_url(search_url, timeout=30) if not data: continue diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index e4f96f4e32..e50ba7bf8d 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -67,14 +67,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != 'RSS': params["term"] = search_string.encode('utf-8') - searchURL = self.url + '?' + urllib.urlencode(params) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.url + '?' 
+ urllib.urlencode(params) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)" s = re.compile(summary_regex, re.DOTALL) results = [] - for curItem in self.cache.getRSSFeed(searchURL)['entries'] or []: + for curItem in self.cache.getRSSFeed(search_url)['entries'] or []: title = curItem['title'] download_url = curItem['link'] if not all([title, download_url]): diff --git a/sickbeard/providers/pretome.py b/sickbeard/providers/pretome.py index b175728d6e..121ea1538a 100644 --- a/sickbeard/providers/pretome.py +++ b/sickbeard/providers/pretome.py @@ -91,10 +91,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.get_url(searchURL) + data = self.get_url(search_url) if not data: continue diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 04ce4c10f9..250b2e1ec1 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -99,11 +99,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories[mode]) + search_url = self.urls['search'] % (urllib.quote(search_string), self.categories[mode]) try: - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.get_url(searchURL) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + data = self.get_url(search_url) time.sleep(cpu_presets[sickbeard.CPU_PRESET]) except Exception as e: logger.log(u"Unable to fetch data. 
Error: %s" % repr(e), logger.WARNING) diff --git a/sickbeard/providers/scenetime.py b/sickbeard/providers/scenetime.py index 23d72a4505..8e3eba84fb 100644 --- a/sickbeard/providers/scenetime.py +++ b/sickbeard/providers/scenetime.py @@ -82,10 +82,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.urls['search'] % (urllib.quote(search_string), self.categories) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.get_url(searchURL) + data = self.get_url(search_url) if not data: continue diff --git a/sickbeard/providers/strike.py b/sickbeard/providers/strike.py index 6cf31397bf..c3955871da 100644 --- a/sickbeard/providers/strike.py +++ b/sickbeard/providers/strike.py @@ -44,9 +44,9 @@ def search(self, search_strings, age=0, ep_obj=None): if mode != 'RSS': logger.log(u"Search string: " + search_string.strip(), logger.DEBUG) - searchURL = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - jdata = self.get_url(searchURL, json=True) + search_url = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + jdata = self.get_url(search_url, json=True) if not jdata: logger.log(u"No data returned from provider", logger.DEBUG) return [] diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py index f8804ee77c..4775e25bf6 100644 --- a/sickbeard/providers/t411.py +++ b/sickbeard/providers/t411.py @@ -93,10 +93,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURLS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] - for searchURL in searchURLS: - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.get_url(searchURL, json=True) + search_urls = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS'] + for search_url in search_urls: + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + data = self.get_url(search_url, json=True) if not data: continue diff --git a/sickbeard/providers/titansoftv.py b/sickbeard/providers/titansoftv.py index dfb3044e85..7cb72144bd 100644 --- a/sickbeard/providers/titansoftv.py +++ b/sickbeard/providers/titansoftv.py @@ -64,11 +64,11 @@ def search(self, search_params, age=0, ep_obj=None): if search_params: params.update(search_params) - searchURL = self.url + '?' + urllib.urlencode(params) + search_url = self.url + '?' 
+ urllib.urlencode(params) logger.log(u"Search string: %s " % search_params, logger.DEBUG) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - parsedJSON = self.get_url(searchURL, json=True) # do search + parsedJSON = self.get_url(search_url, json=True) # do search if not parsedJSON: logger.log(u"No data returned from provider", logger.DEBUG) diff --git a/sickbeard/providers/tntvillage.py b/sickbeard/providers/tntvillage.py index 70e1efb528..e790445029 100644 --- a/sickbeard/providers/tntvillage.py +++ b/sickbeard/providers/tntvillage.py @@ -302,15 +302,15 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many break if mode != 'RSS': - searchURL = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string) + search_url = (self.urls['search_page'] + '&filter={2}').format(z, self.categories, search_string) else: - searchURL = self.urls['search_page'].format(z, self.categories) + search_url = self.urls['search_page'].format(z, self.categories) if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.get_url(searchURL) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + data = self.get_url(search_url) if not data: logger.log(u"No data returned from provider", logger.DEBUG) continue diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index 79fb3af8c9..08e9cf3ecc 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -87,10 +87,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) + search_url = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - data = self.get_url(searchURL) + data = self.get_url(search_url) if not data: continue diff --git a/sickbeard/providers/torrentproject.py b/sickbeard/providers/torrentproject.py index d4346cca07..b5b1584476 100644 --- a/sickbeard/providers/torrentproject.py +++ b/sickbeard/providers/torrentproject.py @@ -49,12 +49,12 @@ def search(self, search_strings, age=0, ep_obj=None): if mode != 'RSS': logger.log(u"Search string: %s " % search_string, logger.DEBUG) - searchURL = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8')) + search_url = self.urls['api'] + "?s=%s&out=json&filter=2101&num=150" % quote_plus(search_string.encode('utf-8')) if self.custom_url: - searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath + search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/')) # Must use posixpath - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - torrents = self.get_url(searchURL, json=True) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + torrents = self.get_url(search_url, json=True) if not (torrents and "total_found" in torrents and int(torrents["total_found"]) > 0): logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) continue @@ -81,7 +81,7 @@ def search(self, search_strings, age=0, ep_obj=None): logger.log(u"Torrent has less 
than 10 seeds getting dyn trackers: " + title, logger.DEBUG) trackerUrl = self.urls['api'] + "" + t_hash + "/trackers_json" if self.custom_url: - searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath + search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/')) # Must use posixpath jdata = self.get_url(trackerUrl, json=True) assert jdata != "maintenance" download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title + "".join(["&tr=" + s for s in jdata]) diff --git a/sickbeard/providers/xthor.py b/sickbeard/providers/xthor.py index 22271f4f4f..c5b479fa0b 100644 --- a/sickbeard/providers/xthor.py +++ b/sickbeard/providers/xthor.py @@ -108,9 +108,9 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man logger.log(u"Search string: %s " % search_string, logger.DEBUG) search_params['search'] = search_string - searchURL = self.urls['search'] + urlencode(search_params) - logger.log(u"Search URL: %s" % searchURL, logger.DEBUG) - data = self.get_url(searchURL) + search_url = self.urls['search'] + urlencode(search_params) + logger.log(u"Search URL: %s" % search_url, logger.DEBUG) + data = self.get_url(search_url) if not data: continue
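
Note on the cpasbien rework above: the old parser probed CSS classes ligne0, ligne1, ligne2, ... one class at a time until a lookup came back empty, while the new code fetches every result row in a single find_all() call with a compiled regex, since the rows only alternate between ligne0 and ligne1. A minimal, self-contained sketch of that technique follows; the HTML snippet is invented stand-in markup, not cpasbien's actual page, and html.parser stands in for the html5lib parser the provider uses:

import re

from bs4 import BeautifulSoup

# Stand-in for the provider's result page (cpasbien's real markup differs).
SAMPLE_HTML = """
<div class="ligne0"><a class="titre" href="/dl-1/show-s01e01.html">Show S01E01 HDTV</a>
<div class="up">12</div><div class="down">3</div><div class="poid">700 Mo</div></div>
<div class="ligne1"><a class="titre" href="/dl-2/show-s01e02.html">Show S01E02 HDTV</a>
<div class="up">0</div><div class="down">9</div><div class="poid">1.4 Go</div></div>
"""

html = BeautifulSoup(SAMPLE_HTML, 'html.parser')

# One query replaces the old "while True: html.findAll(class_='ligne%i' % line)"
# loop: the regex matches both alternating row classes at once.
for row in html.find_all(class_=re.compile('ligne[01]')):
    title = row.find(class_='titre').get_text(strip=True)
    seeders = int(row.find(class_='up').get_text(strip=True))
    leechers = int(row.find(class_='down').get_text(strip=True))
    print('%s (S:%d L:%d)' % (title, seeders, leechers))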
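The rework also changes the size parsing: cpasbien reports sizes with French suffixes (o for octet/byte, then Ko, Mo, Go, ...), so the patch passes units=['o', 'Ko', 'Mo', 'Go', 'To', 'Po'] into convert_size. The helper itself is not shown in this diff; the sketch below is a simplified illustration of the idea, not the project's real convert_size implementation:

# Simplified stand-in for the project's convert_size helper (illustration only).
def convert_size(size_string, units=('B', 'KB', 'MB', 'GB', 'TB', 'PB')):
    """Turn strings like '1.4 Go' into a byte count, or None if unparseable."""
    try:
        value, unit = size_string.split()
        # The unit's position in the list gives the power of 1024.
        return float(value) * 1024 ** units.index(unit)
    except (ValueError, AttributeError):
        return None

units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']  # French units, as in the patch
print(convert_size('700 Mo', units=units) or -1)  # 734003200.0
print(convert_size('broken size', units=units) or -1)  # -1, mirroring the patch's "convert_size(...) or -1"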
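One more pattern worth flagging in kat.py and torrentproject.py: when a custom mirror URL is configured, the search URL is rebased onto it with posixpath.join, and the in-line "Must use posixpath" comment exists because os.path.join would produce backslashes on Windows, which are invalid in URLs. A standalone sketch with made-up URLs:

import posixpath

url = 'https://torrentproject.se/'      # provider's canonical URL (example value)
custom_url = 'https://mirror.example'   # user-configured mirror (example value)
search_url = url + 'api?s=show+s01e01&out=json'

# Drop the canonical prefix, then graft the remaining path onto the mirror.
search_url = posixpath.join(custom_url, search_url.split(url)[1].lstrip('/'))
print(search_url)  # https://mirror.example/api?s=show+s01e01&out=json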