Mirror of https://github.com/moparisthebest/SickRage
Fixes an issue where the daily searcher would not snatch its results even when it had results to snatch.
Fixes an issue where scene-numbered episodes were not properly converted to indexer numbering.
parent d27e4642c4
commit 66e499ab23
@@ -376,23 +376,16 @@ class ParseResult(object):
         return to_return.encode('utf-8')
 
-    def convert(self):
-        if not self.series_name: return self  # can't work without a series name
-
-        if not self.season_number: return self  # can't work without a season
-
-        if not len(self.episode_numbers): return self  # need at least one episode
-
+    def convert(self, show):
+        if not show: return self  # need show object
         if self.air_by_date or self.sports: return self  # scene numbering does not apply to air-by-date
-
-        showResult = helpers.searchDBForShow(self.series_name)
-        if showResult:
-            self.show = helpers.findCertainShow(sickbeard.showList, int(showResult[0]))
-
-        if not self.show:
-            return self
+        if self.season_number == None: return self  # can't work without a season
+        if len(self.episode_numbers) == 0: return self  # need at least one episode
 
         new_episode_numbers = []
         new_season_numbers = []
         for epNo in self.episode_numbers:
-            (s, e) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, self.season_number,
+            (s, e) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer, self.season_number,
                                                            epNo)
             new_episode_numbers.append(e)
             new_season_numbers.append(s)
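For context, a minimal caller-side sketch (not part of the commit) of the new contract: convert() no longer resolves the show via searchDBForShow, so the caller has to hand in the show object it already holds. Only the convert(show) signature and the show.indexerid / show.indexer attributes come from the diff; the module path and helper name below are assumptions for illustration.

    # Hypothetical helper; module path assumed, not taken from this diff.
    from sickbeard.name_parser.parser import NameParser, InvalidNameException

    def scene_to_indexer(release_name, show):
        """Parse a release name and map its scene numbering onto the show's indexer numbering."""
        try:
            parse_result = NameParser(False).parse(release_name)
        except InvalidNameException:
            return None
        # convert() now needs the show object instead of guessing it from series_name
        return parse_result.convert(show)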
@@ -289,11 +289,14 @@ class GenericProvider:
             # parse the file name
             try:
                 myParser = NameParser(False)
-                parse_result = myParser.parse(title).convert()
+                parse_result = myParser.parse(title)
             except InvalidNameException:
                 logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                 continue
 
+            # scene -> indexer numbering
+            parse_result = parse_result.convert(self.show)
+
             if not (self.show.air_by_date or self.show.sports):
                 if search_mode == 'sponly' and len(parse_result.episode_numbers):
                     logger.log(
@@ -308,13 +311,13 @@ class GenericProvider:
                         ep_obj.season) + ", ignoring", logger.DEBUG)
                     continue
                 elif len(parse_result.episode_numbers) and (
-                        parse_result.season_number != ep_obj.season or ep_obj.episode not in parse_result.episode_numbers):
+                        parse_result.season_number != ep_obj.scene_season or ep_obj.episode not in parse_result.episode_numbers):
                     logger.log(u"Episode " + title + " isn't " + str(ep_obj.season) + "x" + str(
                         ep_obj.episode) + ", skipping it", logger.DEBUG)
                     continue
 
                 # we just use the existing info for normal searches
-                actual_season = season
+                actual_season = ep_obj.season
                 actual_episodes = parse_result.episode_numbers
             else:
                 if not (parse_result.air_by_date or parse_result.sports):
@@ -13,18 +13,23 @@
 # You should have received a copy of the GNU General Public License
 # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
 
+import re
 import time
 import datetime
 import urllib
+import urlparse
+import sys
 import generic
 import sickbeard
 
+from lib import requests
+from lib.requests import exceptions
 from sickbeard import classes
 from sickbeard import logger, tvcache, exceptions
 from sickbeard import helpers
+from sickbeard import clients
 from sickbeard.common import cpu_presets
 from sickbeard.exceptions import ex, AuthException
 
 try:
     import json
 except ImportError:
@@ -41,13 +46,15 @@ class HDBitsProvider(generic.TorrentProvider):
         self.enabled = False
         self.username = None
         self.password = None
+        self.uid = None
+        self.hash = None
         self.ratio = None
 
         self.cache = HDBitsCache(self)
 
-        self.url = 'https://hdbits.org'
-        self.search_url = 'https://hdbits.org/api/torrents'
-        self.rss_url = 'https://hdbits.org/api/torrents'
+        self.url = 'http://hdbits.org'
+        self.search_url = 'http://hdbits.org/api/torrents'
+        self.rss_url = 'http://hdbits.org/api/torrents'
         self.download_url = 'http://hdbits.org/download.php?'
 
     def isEnabled(self):
@ -92,6 +99,38 @@ class HDBitsProvider(generic.TorrentProvider):
|
|||||||
|
|
||||||
return (title, url)
|
return (title, url)
|
||||||
|
|
||||||
|
def getURL(self, url, post_data=None, headers=None, json=False):
|
||||||
|
|
||||||
|
if not self.session:
|
||||||
|
self.session = requests.Session()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Remove double-slashes from url
|
||||||
|
parsed = list(urlparse.urlparse(url))
|
||||||
|
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
|
||||||
|
url = urlparse.urlunparse(parsed)
|
||||||
|
|
||||||
|
if sickbeard.PROXY_SETTING:
|
||||||
|
proxies = {
|
||||||
|
"http": sickbeard.PROXY_SETTING,
|
||||||
|
"https": sickbeard.PROXY_SETTING,
|
||||||
|
}
|
||||||
|
|
||||||
|
r = self.session.get(url, data=post_data, proxies=proxies, verify=False)
|
||||||
|
else:
|
||||||
|
r = self.session.get(url, data=post_data, verify=False)
|
||||||
|
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
|
||||||
|
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
|
||||||
|
return None
|
||||||
|
|
||||||
|
if r.status_code != 200:
|
||||||
|
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
|
||||||
|
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
|
||||||
|
return None
|
||||||
|
if json:
|
||||||
|
return r.json()
|
||||||
|
return r.content
|
||||||
|
|
||||||
def _doSearch(self, search_params, epcount=0, age=0):
|
def _doSearch(self, search_params, epcount=0, age=0):
|
||||||
results = []
|
results = []
|
||||||
|
|
||||||
|
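A rough usage sketch (not part of the commit) for the provider-local getURL added above, as it might be called from _doSearch. The search_url attribute, the json flag, and the error logging behaviour come from the diff; the request payload is only a placeholder, since the actual HDBits request format is outside this hunk.

    # Hypothetical call inside HDBitsProvider._doSearch; the payload is illustrative only.
    parsedJSON = self.getURL(self.search_url, post_data=json.dumps(search_params), json=True)

    if not parsedJSON:
        # connection errors and non-200 responses are already logged inside getURL
        return []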
@@ -100,14 +100,13 @@ class DailySearchQueueItem(generic_queue.QueueItem):
         if not len(foundResults):
             logger.log(u"No needed episodes found during daily search for [" + self.show.name + "]")
         else:
-            for curEp in foundResults:
-                for result in curEp:
-                    # just use the first result for now
-                    logger.log(u"Downloading " + result.name + " from " + result.provider.name)
-                    search.snatchEpisode(result)
+            for result in foundResults:
+                # just use the first result for now
+                logger.log(u"Downloading " + result.name + " from " + result.provider.name)
+                search.snatchEpisode(result)
 
                 # give the CPU a break
                 time.sleep(2)
 
         generic_queue.QueueItem.finish(self)
 
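A note on the daily-search fix above, under the assumption (implied by the new loop) that foundResults is now a flat list of result objects rather than a list of per-episode result lists:

    # Assumed shapes, illustrative only:
    #   old: foundResults == [[result_a, result_b], [result_c]]   # grouped per episode
    #   new: foundResults == [result_a, result_b, result_c]       # flat list
    # With the flat shape, the old inner loop ("for result in curEp") iterated over a
    # single result object rather than a list, so snatchEpisode() never received a
    # usable result and nothing was snatched.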
@@ -268,7 +268,7 @@ class TVCache():
         # if we don't have complete info then parse the filename to get it
         try:
             myParser = NameParser()
-            parse_result = myParser.parse(name).convert()
+            parse_result = myParser.parse(name)
         except InvalidNameException:
             logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
             return None
@@ -308,6 +308,9 @@ class TVCache():
                 sickbeard.name_cache.addNameToCache(parse_result.series_name, 0)
             return None
 
+        # scene -> indexer numbering
+        parse_result = parse_result.convert(showObj)
+
         season = episodes = None
         if parse_result.air_by_date or parse_result.sports:
             myDB = db.DBConnection()
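Taken together, the two TVCache hunks move the cache path from parse-and-convert in one step to parse first, then convert once the show object (showObj) has been resolved. A condensed sketch, with the surrounding name-cache logic omitted:

    # Condensed sketch of the new TVCache flow; surrounding logic omitted.
    try:
        myParser = NameParser()
        parse_result = myParser.parse(name)
    except InvalidNameException:
        return None

    # ... name-cache lookup resolves showObj ...

    # scene -> indexer numbering, now that the show is known
    parse_result = parse_result.convert(showObj)
    # season_number / episode_numbers are in indexer numbering from here on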