Commit 8e1b681
Merge pull request SickChill#728 from SickRage/providers
Replace all instances of searchURL for search_url in providers
miigotu committed Jan 12, 2016
2 parents d0d6c87 + e11deb6 commit 8e1b681
Showing 21 changed files with 93 additions and 108 deletions.
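
The substantive change is mechanical: every provider's camelCase searchURL local becomes the PEP 8 snake_case search_url. Along the way, the first hunk drops cpasbien from the provider registry, and the cpasbien search URL gains a ',trie-seeds-d' suffix so results come back sorted by seeds. Below is a minimal, hypothetical sketch of the convention being enforced; build_search_url and its arguments are invented for illustration, with the URL scheme mirroring the cpasbien hunk further down.

# Hypothetical sketch of the rename this commit applies across providers.
def build_search_url(base_url, search_string):
    # Old style (removed by this commit): searchURL = base_url + '/recherche/' + ...
    # New style (added):
    search_url = base_url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
    return search_url

print(build_search_url('http://www.cpasbien.io', 'Show Name S01E01'))
# -> http://www.cpasbien.io/recherche/Show-Name-S01E01.html,trie-seeds-d
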
4 changes: 2 additions & 2 deletions sickbeard/providers/__init__.py
@@ -23,7 +23,7 @@

import sickbeard
from sickbeard.providers import btn, newznab, rsstorrent, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, cpasbien, fnt, xthor, torrentbytes, \
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, fnt, xthor, torrentbytes, \
freshontv, morethantv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents

@@ -32,7 +32,7 @@
'torrentday', 'hdbits', 'hounddawgs', 'iptorrents', 'omgwtfnzbs',
'speedcd', 'nyaatorrents', 'torrentbytes', 'freshontv',
'morethantv', 'bitsoup', 't411', 'tokyotoshokan', 'alpharatio',
'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers', 'cpasbien',
'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'bluetigers',
'fnt', 'xthor', 'scenetime', 'btdigg', 'transmitthenet', 'tvchaosuk',
'torrentproject', 'extratorrent', 'bitcannon', 'torrentz', 'pretome', 'gftracker',
'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free', 'limetorrents'
59 changes: 22 additions & 37 deletions sickbeard/providers/cpasbien.py
@@ -16,8 +16,7 @@
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

import traceback
import re

from sickbeard import logger
from sickbeard import tvcache
@@ -39,10 +38,9 @@ def __init__(self):
self.url = "http://www.cpasbien.io"

self.proper_strings = ['PROPER', 'REPACK']

self.cache = CpasbienCache(self)

def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals, too-many-statements, too-many-branches
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
results = []
for mode in search_strings:
items = []
@@ -51,57 +49,44 @@ def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-man

if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)
searchURL = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html'
search_url = self.url + '/recherche/' + search_string.replace('.', '-').replace(' ', '-') + '.html,trie-seeds-d'
else:
searchURL = self.url + '/view_cat.php?categorie=series&trie=date-d'
search_url = self.url + '/view_cat.php?categorie=series&trie=date-d'

logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
data = self.get_url(searchURL)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
data = self.get_url(search_url)
if not data:
continue

try:
with BS4Parser(data, 'html5lib') as html:
line = 0
torrents = []
while True:
resultlin = html.findAll(class_='ligne%i' % line)
if not resultlin:
break

torrents += resultlin
line += 1

for torrent in torrents:
try:
title = torrent.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
tmp = torrent.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
download_url = (self.url + '/telechargement/%s' % tmp)
seeders = try_int(torrent.find(class_="up").get_text(strip=True))
leechers = try_int(torrent.find(class_="down").get_text(strip=True))
torrent_size = torrent.find(class_="poid").get_text()

size = convert_size(torrent_size) or -1
except (AttributeError, TypeError, KeyError, IndexError):
continue

with BS4Parser(data, 'html5lib') as html:
torrent_rows = html.find_all(class_=re.compile('ligne[01]'))
for result in torrent_rows:
try:
title = result.find(class_="titre").get_text(strip=True).replace("HDTV", "HDTV x264-CPasBien")
tmp = result.find("a")['href'].split('/')[-1].replace('.html', '.torrent').strip()
download_url = (self.url + '/telechargement/%s' % tmp)
if not all([title, download_url]):
continue

# Filter unseeded torrent
seeders = try_int(result.find(class_="up").get_text(strip=True))
leechers = try_int(result.find(class_="down").get_text(strip=True))
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
continue

torrent_size = result.find(class_="poid").get_text(strip=True)

units = ['o', 'Ko', 'Mo', 'Go', 'To', 'Po']
size = convert_size(torrent_size, units=units) or -1

item = title, download_url, size, seeders, leechers
if mode != 'RSS':
logger.log(u"Found result: %s " % title, logger.DEBUG)

items.append(item)

except Exception:
logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)
except StandardError:
continue

# For each search mode sort all the items by seeders if available
items.sort(key=lambda tup: tup[3], reverse=True)
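
Beyond the rename, the cpasbien hunk rewrites the row parsing: the old while loop that probed class_='ligne%i' until a miss is replaced by a single regex-based find_all, the unseeded-torrent filter moves inside the row loop, and convert_size now receives the site's French units ('o', 'Ko', 'Mo', ...). A self-contained sketch of the new lookup follows, assuming bs4 is installed; the two-row HTML is invented, imitating how cpasbien stripes its result rows with alternating ligne0/ligne1 classes.

import re
from bs4 import BeautifulSoup

# Invented markup imitating cpasbien's striped result rows.
html = '''
<div class="ligne0"><a class="titre" href="/dl/a.html">Show.S01E01.HDTV</a>
  <span class="up">12</span><span class="down">3</span><span class="poid">700 Mo</span></div>
<div class="ligne1"><a class="titre" href="/dl/b.html">Show.S01E02.HDTV</a>
  <span class="up">8</span><span class="down">1</span><span class="poid">1.4 Go</span></div>
'''

soup = BeautifulSoup(html, 'html.parser')
# One query collects every striped row; no counter, no sentinel break.
torrent_rows = soup.find_all(class_=re.compile('ligne[01]'))
for result in torrent_rows:
    title = result.find(class_='titre').get_text(strip=True)
    seeders = int(result.find(class_='up').get_text(strip=True))
    print('%s  seeds=%d' % (title, seeders))
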
6 changes: 3 additions & 3 deletions sickbeard/providers/elitetorrent.py
@@ -87,10 +87,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man
search_string = re.sub(r'S0*(\d*)E(\d*)', r'\1x\2', search_string)
self.search_params['buscar'] = search_string.strip() if mode != 'RSS' else ''

searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

data = self.get_url(searchURL, timeout=30)
data = self.get_url(search_url, timeout=30)
if not data:
continue

12 changes: 6 additions & 6 deletions sickbeard/providers/freshontv.py
@@ -116,9 +116,9 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-
if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)

searchURL = self.urls['search'] % (freeleech, search_string)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
init_html = self.get_url(searchURL)
search_url = self.urls['search'] % (freeleech, search_string)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
init_html = self.get_url(search_url)
max_page_number = 0

if not init_html:
@@ -160,9 +160,9 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-
for i in range(1, max_page_number):

time.sleep(1)
page_searchURL = searchURL + '&page=' + str(i)
# '.log(u"Search string: " + page_searchURL, logger.DEBUG)
page_html = self.get_url(page_searchURL)
page_search_url = search_url + '&page=' + str(i)
# '.log(u"Search string: " + page_search_url, logger.DEBUG)
page_html = self.get_url(page_search_url)

if not page_html:
continue
6 changes: 3 additions & 3 deletions sickbeard/providers/gftracker.py
@@ -97,11 +97,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many
if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)

searchURL = self.urls['search'] % (self.categories, search_string)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_url = self.urls['search'] % (self.categories, search_string)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

# Returns top 30 results by default, expandable in user profile
data = self.get_url(searchURL)
data = self.get_url(search_url)
if not data:
continue

8 changes: 4 additions & 4 deletions sickbeard/providers/hdspace.py
@@ -92,15 +92,15 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many
logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
for search_string in search_strings[mode]:
if mode != 'RSS':
searchURL = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),)
search_url = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),)
else:
searchURL = self.urls['search'] % ''
search_url = self.urls['search'] % ''

logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
if mode != 'RSS':
logger.log(u"Search string: %s" % search_string, logger.DEBUG)

data = self.get_url(searchURL)
data = self.get_url(search_url)
if not data or 'please try later' in data:
logger.log(u"No data returned from provider", logger.DEBUG)
continue
10 changes: 5 additions & 5 deletions sickbeard/providers/hdtorrents.py
@@ -92,17 +92,17 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man
for search_string in search_strings[mode]:

if mode != 'RSS':
searchURL = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
search_url = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
logger.log(u"Search string: %s" % search_string, logger.DEBUG)
else:
searchURL = self.urls['rss'] % self.categories
search_url = self.urls['rss'] % self.categories

if self.freeleech:
searchURL = searchURL.replace('active=1', 'active=5')
search_url = search_url.replace('active=1', 'active=5')

logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

data = self.get_url(searchURL)
data = self.get_url(search_url)
if not data or 'please try later' in data:
logger.log(u"No data returned from provider", logger.DEBUG)
continue
8 changes: 4 additions & 4 deletions sickbeard/providers/iptorrents.py
@@ -95,11 +95,11 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many-
logger.log(u"Search string: %s " % search_string, logger.DEBUG)

# URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
searchURL = self.urls['search'] % (self.categories, freeleech, search_string)
searchURL += ';o=seeders' if mode != 'RSS' else ''
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_url = self.urls['search'] % (self.categories, freeleech, search_string)
search_url += ';o=seeders' if mode != 'RSS' else ''
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

data = self.get_url(searchURL)
data = self.get_url(search_url)
if not data:
continue

8 changes: 4 additions & 4 deletions sickbeard/providers/kat.py
@@ -82,12 +82,12 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many

url_fmt_string = 'usearch' if mode != 'RSS' else search_string
try:
searchURL = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params)
search_url = self.urls['search'] % url_fmt_string + '?' + urlencode(self.search_params)
if self.custom_url:
searchURL = posixpath.join(self.custom_url, searchURL.split(self.url)[1].lstrip('/')) # Must use posixpath
search_url = posixpath.join(self.custom_url, search_url.split(self.url)[1].lstrip('/')) # Must use posixpath

logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
data = self.get_url(searchURL)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
data = self.get_url(search_url)
if not data:
logger.log(u'URL did not return data, maybe try a custom url, or a different one', logger.DEBUG)
continue
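
The kat hunk preserves the inline note about posixpath: when a custom mirror is configured, the path of the generated URL is grafted onto it with posixpath.join, because URL paths are '/'-separated on every OS, while os.path.join would insert backslashes on Windows. A short sketch of that rewrite step; the base and mirror URLs here are invented.

import posixpath

url = 'https://kat.cr'                                    # assumed provider base URL
search_url = 'https://kat.cr/usearch/foo/?field=seeders'  # assumed generated URL
custom_url = 'https://kickass.mirror'                     # invented user-configured mirror

# Strip the default base, then join the remainder onto the mirror.
search_url = posixpath.join(custom_url, search_url.split(url)[1].lstrip('/'))
print(search_url)  # -> https://kickass.mirror/usearch/foo/?field=seeders
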
6 changes: 3 additions & 3 deletions sickbeard/providers/newpct.py
@@ -92,10 +92,10 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man
self.search_params['q'] = search_string.strip() if mode != 'RSS' else ''
self.search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy'

searchURL = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_url = self.urls['search'] + '?' + urllib.parse.urlencode(self.search_params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

data = self.get_url(searchURL, timeout=30)
data = self.get_url(search_url, timeout=30)
if not data:
continue

6 changes: 3 additions & 3 deletions sickbeard/providers/nyaatorrents.py
@@ -67,14 +67,14 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man
if mode != 'RSS':
params["term"] = search_string.encode('utf-8')

searchURL = self.url + '?' + urllib.urlencode(params)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_url = self.url + '?' + urllib.urlencode(params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
s = re.compile(summary_regex, re.DOTALL)

results = []
for curItem in self.cache.getRSSFeed(searchURL)['entries'] or []:
for curItem in self.cache.getRSSFeed(search_url)['entries'] or []:
title = curItem['title']
download_url = curItem['link']
if not all([title, download_url]):
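
nyaatorrents extracts seeders, leechers, and size from each RSS item's summary text via the summary_regex shown above. A quick sketch of that regex in action; the summary string below is invented to match the site's description format.

import re

summary_regex = r"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
s = re.compile(summary_regex, re.DOTALL)

summary = "15 seeder(s), 2 leecher(s), 340 download(s) - 233.4 MiB - Trusted"
match = s.search(summary)
if match:
    seeders, leechers, size = match.group(1), match.group(2), match.group(3)
    print('%s seeders, %s leechers, %s' % (seeders, leechers, size))
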
6 changes: 3 additions & 3 deletions sickbeard/providers/pretome.py
@@ -91,10 +91,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many
if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)

searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_url = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

data = self.get_url(searchURL)
data = self.get_url(search_url)
if not data:
continue

6 changes: 3 additions & 3 deletions sickbeard/providers/scc.py
@@ -99,11 +99,11 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man
if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)

searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories[mode])
search_url = self.urls['search'] % (urllib.quote(search_string), self.categories[mode])

try:
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
data = self.get_url(searchURL)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
data = self.get_url(search_url)
time.sleep(cpu_presets[sickbeard.CPU_PRESET])
except Exception as e:
logger.log(u"Unable to fetch data. Error: %s" % repr(e), logger.WARNING)
6 changes: 3 additions & 3 deletions sickbeard/providers/scenetime.py
@@ -82,10 +82,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many
if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)

searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
search_url = self.urls['search'] % (urllib.quote(search_string), self.categories)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)

data = self.get_url(searchURL)
data = self.get_url(search_url)
if not data:
continue

6 changes: 3 additions & 3 deletions sickbeard/providers/strike.py
@@ -44,9 +44,9 @@ def search(self, search_strings, age=0, ep_obj=None):
if mode != 'RSS':
logger.log(u"Search string: " + search_string.strip(), logger.DEBUG)

searchURL = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
jdata = self.get_url(searchURL, json=True)
search_url = self.url + "api/v2/torrents/search/?category=TV&phrase=" + search_string
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
jdata = self.get_url(search_url, json=True)
if not jdata:
logger.log(u"No data returned from provider", logger.DEBUG)
return []
8 changes: 4 additions & 4 deletions sickbeard/providers/t411.py
@@ -93,10 +93,10 @@ def search(self, search_params, age=0, ep_obj=None): # pylint: disable=too-many
if mode != 'RSS':
logger.log(u"Search string: %s " % search_string, logger.DEBUG)

searchURLS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS']
for searchURL in searchURLS:
logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
data = self.get_url(searchURL, json=True)
search_urlS = ([self.urls['search'] % (search_string, u) for u in self.subcategories], [self.urls['rss']])[mode == 'RSS']
for search_url in search_urlS:
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
data = self.get_url(search_url, json=True)
if not data:
continue

(The remaining 5 changed files were not loaded on this page.)
