# Author: Mr_Orange <mr_orange@hotmail.it>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import sys
import os
import traceback
import urllib, urlparse
import re
import datetime

import sickbeard
import generic

from sickbeard.common import Quality, Overview
from sickbeard.name_parser.parser import NameParser, InvalidNameException
from sickbeard import logger
from sickbeard import tvcache
from sickbeard import helpers
from sickbeard import db
from sickbeard import classes
from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName
from sickbeard.exceptions import ex
from sickbeard import encodingKludge as ek
from sickbeard import clients

from lib import requests
from lib.requests import exceptions
from bs4 import BeautifulSoup
from lib.unidecode import unidecode


class PublicHDProvider(generic.TorrentProvider):
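    """Torrent provider for the PublicHD index, reached via phdproxy.com.

    Supports backlog searches; results flow through _doSearch() as
    (title, url, link, seeders, leechers) tuples.
    """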

    def __init__(self):
        generic.TorrentProvider.__init__(self, "PublicHD")

        self.supportsBacklog = True
        self.cache = PublicHDCache(self)

        self.url = 'http://phdproxy.com/'
        self.searchurl = self.url + 'index.php?page=torrents&search=%s&active=0&category=%s&order=5&by=2'  # order by seeders
        self.categories = {'Season': ['23'], 'Episode': ['7', '14', '24'], 'RSS': ['7', '14', '23', '24']}

    def isEnabled(self):
        return sickbeard.PUBLICHD

    def imageName(self):
        return 'publichd.png'

    def getQuality(self, item):
        quality = Quality.sceneQuality(item[0])
        return quality

    def _get_season_search_strings(self, season, episode):
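        """Build season-level search strings for every known name of the show.

        Two patterns are generated per name: 'Show SXX' and 'Show Season X'.
        Episode strings are delegated to _get_episode_search_strings().
        Returns a one-element list holding a dict of 'Season' and 'Episode'
        string lists.
        """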

        search_string = {'Season': [], 'Episode': []}

        for show_name in set(allPossibleShowNames(self.show)):
            ep_string = show_name + ' S%02d' % int(season)  # 1) showName SXX
            search_string['Season'].append(ep_string)

            ep_string = show_name + ' Season ' + str(season)  # 2) showName Season X
            search_string['Season'].append(ep_string)

        search_string['Episode'] = self._get_episode_search_strings(season, episode)[0]['Episode']

        return [search_string]

    def _get_episode_search_strings(self, season, episode, add_string=''):
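        """Build episode-level search strings.

        Air-by-date and sports shows search on the episode date (a date of
        2014-04-28 becomes '2014|04|28|Apr'); other shows use the scene
        SXXEXX pattern from sickbeard.config.naming_ep_type. For those shows,
        each alternative in add_string (e.g. 'PROPER|REPACK') yields its own
        search string.
        """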

        search_string = {'Episode': []}

        if not episode:
            return []

        if self.show.air_by_date:
            for show_name in set(allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                            str(episode).replace('-', '|') + '|' + \
                            episode.strftime('%b')
                search_string['Episode'].append(ep_string)
        elif self.show.sports:
            for show_name in set(allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                            str(episode).replace('-', '|') + '|' + \
                            episode.strftime('%b')
                search_string['Episode'].append(ep_string)
        else:
            for show_name in set(allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': season,
                                                                  'episodenumber': episode}

                for x in add_string.split('|'):
                    to_search = re.sub(r'\s+', ' ', ep_string + ' %s' % x)
                    search_string['Episode'].append(to_search)

        return [search_string]

    def _doSearch(self, search_params, show=None, age=None):
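        """Run each search string against the site and scrape the results.

        'RSS' mode fetches the plain category listing instead of a search.
        Every parsed row becomes a (title, url, link, seeders, leechers)
        tuple; items are sorted by seeders, descending, before returning.
        """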

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_params.keys():
            for search_string in search_params[mode]:

                if mode == 'RSS':
                    searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' % (
                        ';'.join(self.categories[mode]))
                    logger.log(u"PublicHD cache update URL: " + searchURL, logger.DEBUG)
                else:
                    searchURL = self.searchurl % (
                        urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode]))
                    logger.log(u"Search string: " + searchURL, logger.DEBUG)

                html = self.getURL(searchURL)
                if not html:
                    continue

                # remove unnecessary <option> lines which slow down BeautifulSoup
                optreg = re.compile(r'<option.*</option>')
                html = os.linesep.join([s for s in html.splitlines() if not optreg.search(s)])

                try:
                    soup = BeautifulSoup(html, features=["html5lib", "permissive"])

                    torrent_table = soup.find('table', attrs={'id': 'torrbg'})
                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                    # continue only if at least one release was found
                    if len(torrent_rows) < 2:
                        logger.log(u"The data returned from " + self.name + " does not contain any torrents",
                                   logger.DEBUG)
                        continue

                    for tr in torrent_rows[1:]:

                        try:
                            link = self.url + tr.find(href=re.compile('page=torrent-details'))['href']
                            title = tr.find(lambda x: x.has_attr('title')).text.replace('_', '.')
                            url = tr.find(href=re.compile('magnet+'))['href']
                            seeders = int(tr.find_all('td', {'class': 'header'})[4].text)
                            leechers = int(tr.find_all('td', {'class': 'header'})[5].text)
                        except (AttributeError, TypeError):
                            continue

                        # skip dead torrents for manual/backlog searches
                        if mode != 'RSS' and seeders == 0:
                            continue

                        if not title or not url:
                            continue

                        item = title, url, link, seeders, leechers

                        items[mode].append(item)

                except Exception, e:
                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
                               logger.ERROR)

            # for each search mode sort all the items by seeders
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results

    def _get_title_and_url(self, item):
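        """Unpack a result tuple into (title, url), unescaping '&amp;'."""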

        title, url, id, seeders, leechers = item

        if url:
            url = url.replace('&amp;', '&')

        return (title, url)

    def getURL(self, url, post_data=None, headers=None):
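        """Fetch a URL with this provider's persistent requests session.

        Collapses duplicate slashes in the path, skips certificate
        verification for the proxy, and returns the raw body, or None on a
        connection error or non-200 response.
        """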

        if not self.session:
            self.session = requests.Session()

        try:
            # Remove double-slashes from url
            parsed = list(urlparse.urlparse(url))
            parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
            url = urlparse.urlunparse(parsed)

            r = self.session.get(url, verify=False)
        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
            logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
            return None

        if r.status_code != 200:
            logger.log(self.name + u" page requested with url " + url + " returned status code " + str(
                r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
            return None

        return r.content

    def downloadResult(self, result):
        """
        Fetch the .torrent file for this result from Torcache, looked up by
        the hash in its magnet link, and save it to disk.
        """

        if not self.session:
            self.session = requests.Session()

        # re.findall returns a list; check it is non-empty before indexing
        torrent_hash_match = re.findall(r'urn:btih:([\w]{32,40})', result.url)

        if not torrent_hash_match:
            logger.log("Unable to extract torrent hash from link: " + result.url, logger.ERROR)
            return False

        torrent_hash = torrent_hash_match[0].upper()

        try:
            r = self.session.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False)
        except Exception, e:
            logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
            return False

        if not r.status_code == 200:
            return False

        magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                               helpers.sanitizeFileName(result.name) + '.' + self.providerType)
        magnetFileContent = r.content

        try:
            with open(magnetFileName, 'wb') as fileOut:
                fileOut.write(magnetFileContent)

            helpers.chmodAsParent(magnetFileName)

        except EnvironmentError, e:
            logger.log("Unable to save the file: " + ex(e), logger.ERROR)
            return False

        logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
        return True

    def findPropers(self, search_date=datetime.datetime.today()):
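        """Search for PROPER/REPACK releases of recently aired episodes.

        Selects downloaded or snatched episodes airing on or after
        search_date, reruns the episode search with 'PROPER|REPACK'
        appended, and wraps each hit in a classes.Proper object.
        """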

        results = []

        sqlResults = db.DBConnection().select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
            ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
            ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
        )
        if not sqlResults:
            return []

        for sqlshow in sqlResults:
            self.show = curshow = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
            curEp = curshow.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))

            searchString = self._get_episode_search_strings(curEp.scene_season,
                                                            curEp.airdate if curshow.air_by_date else curEp.scene_episode,
                                                            add_string='PROPER|REPACK')

            for item in self._doSearch(searchString[0], show=curshow):
                title, url = self._get_title_and_url(item)
                results.append(classes.Proper(title, url, datetime.datetime.today()))

        return results


class PublicHDCache(tvcache.TVCache):
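    """RSS-style cache for PublicHD, polled at most every 20 minutes."""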

    def __init__(self, provider):
        tvcache.TVCache.__init__(self, provider)

        # only poll PublicHD every 20 minutes max
        self.minTime = 20

    def updateCache(self):
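        """Refresh the cache from the provider's RSS category listing.

        Returns early if the minimum poll interval has not elapsed; on a
        successful fetch the old cache is cleared and all new items are
        committed in a single mass DB action.
        """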

        if not self.shouldUpdate():
            return

        search_params = {'RSS': ['rss']}
        rss_results = self.provider._doSearch(search_params)

        if rss_results:
            self.setLastUpdate()
        else:
            return []

        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
        self._clearCache()

        cl = []
        for result in rss_results:
            item = (result[0], result[1])
            ci = self._parseItem(item)
            if ci is not None:
                cl.append(ci)

        if len(cl) > 0:
            myDB = self._getDB()
            myDB.mass_action(cl)

    def _parseItem(self, item):
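        """Add one (title, url) result to the cache, skipping empty items."""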

        (title, url) = item

        if not title or not url:
            return None

        logger.log(u"Adding item to cache: " + title, logger.DEBUG)

        return self._addCacheEntry(title, url)


provider = PublicHDProvider()