mirror of https://github.com/moparisthebest/SickRage synced 2024-12-12 11:02:21 -05:00

Fixed further JSON issues with the SpeedCD provider.

Fixed SSL certificate verification issues with several providers.

Air-by-date shows now automatically get their respective season/episode numbers during release parsing.
echel0n 2014-07-24 11:16:59 -07:00
parent b16ff81478
commit f47734446d
14 changed files with 104 additions and 146 deletions

View File

@@ -37,7 +37,7 @@ class TransmissionAPI(GenericClient):
post_data = json.dumps({'method': 'session-get', })
try:
self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
self.response = self.session.post(self.url, data=post_data.encode('utf-8'), verify=sickbeard.TORRENT_VERIFY_CERT)
self.auth = re.search('X-Transmission-Session-Id:\s*(\w+)', self.response.text).group(1)
except:
return None
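
The Transmission change above replaces the unconditional POST with one that passes the configured certificate-verification flag through to requests. A minimal standalone sketch of the same idea, with a placeholder RPC URL and a local boolean standing in for sickbeard.TORRENT_VERIFY_CERT:

import json
import re
import requests

RPC_URL = 'https://localhost:9091/transmission/rpc'  # placeholder
VERIFY_CERT = False  # stands in for sickbeard.TORRENT_VERIFY_CERT

def get_session_id(session):
    # Ask Transmission for a session id; forward the verify flag so self-signed
    # certificates only fail the request when verification is actually enabled.
    post_data = json.dumps({'method': 'session-get'})
    response = session.post(RPC_URL, data=post_data.encode('utf-8'), verify=VERIFY_CERT)
    match = re.search(r'X-Transmission-Session-Id:\s*(\w+)', response.text)
    return match.group(1) if match else None

auth = get_session_id(requests.Session())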

View File

@@ -703,6 +703,9 @@ def update_anime_support():
def get_absolute_number_from_season_and_episode(show, season, episode):
absolute_number = None
if season and episode:
myDB = db.DBConnection()
sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
sqlResults = myDB.select(sql, [show.indexerid, season, episode])
@@ -711,23 +714,18 @@ def get_absolute_number_from_season_and_episode(show, season, episode):
absolute_number = int(sqlResults[0]["absolute_number"])
logger.log(
"Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode), logger.DEBUG)
return absolute_number
else:
logger.log(
"No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(episode),
logger.DEBUG)
return None
return absolute_number
def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=None):
if len(absolute_numbers) == 0:
raise EpisodeNotFoundByAbsoluteNumberException
episodes = []
season = None
if len(absolute_numbers):
if not show and indexer_id:
show = findCertainShow(sickbeard.showList, indexer_id)
@@ -736,8 +734,6 @@ def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=Non
ep = show.getEpisode(None, None, absolute_number=absolute_number)
if ep:
episodes.append(ep.episode)
else:
raise EpisodeNotFoundByAbsoluteNumberException
season = ep.season  # this will always take the last found season, so eps that cross the season border are not handled well
return (season, episodes)
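
The helper above now opens its own DB connection and maps a (season, episode) pair to an absolute number through the tv_episodes table. A self-contained sketch of that query against a throwaway SQLite database (table and column names follow the diff; the show id and data are made up):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE tv_episodes (showid INTEGER, season INTEGER, episode INTEGER, absolute_number INTEGER)")
conn.execute("INSERT INTO tv_episodes VALUES (101, 2, 5, 18)")

def get_absolute_number(showid, season, episode):
    # Same shape of query as get_absolute_number_from_season_and_episode above.
    row = conn.execute(
        "SELECT absolute_number FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
        (showid, season, episode)).fetchone()
    return int(row[0]) if row else None

print(get_absolute_number(101, 2, 5))  # -> 18
print(get_absolute_number(101, 3, 1))  # -> None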

View File

@@ -26,7 +26,7 @@ import os.path
import regexes
import sickbeard
from sickbeard import logger, helpers, scene_numbering, common, exceptions, scene_exceptions, encodingKludge as ek
from sickbeard import logger, helpers, scene_numbering, common, exceptions, scene_exceptions, encodingKludge as ek, db
from dateutil import parser
@@ -89,14 +89,13 @@ class NameParser(object):
self.compiled_regexes = []
for regexItem in uncompiled_regex:
for i, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
try:
cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
except re.error, errormsg:
logger.log(u"WARNING: Invalid episode_pattern, %s. %s" % (errormsg, cur_pattern))
else:
cur_pattern_name = str(i) + "_" + cur_pattern_name
self.compiled_regexes.append((regexMode, cur_pattern_name, cur_regex))
self.compiled_regexes.append((regexMode, cur_pattern_num, cur_pattern_name, cur_regex))
def _parse_string(self, name):
if not name:
@@ -111,16 +110,15 @@ class NameParser(object):
break
self._compile_regexes(regexMode)
for (cur_regexMode, cur_regex_name, cur_regex) in self.compiled_regexes:
for (cur_regexMode, cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes:
match = cur_regex.match(name)
if not match:
continue
regex_num = int(re.match('^\d{1,2}', cur_regex_name).group(0))
result = ParseResult(name)
result.which_regex = [cur_regex_name]
result.score = 0 - regex_num
result.score = 0 - cur_regex_num
named_groups = match.groupdict().keys()
@@ -133,8 +131,6 @@ class NameParser(object):
# get show object
if not result.show and not self.naming_pattern:
result.show = helpers.get_show(result.series_name, self.tryIndexers)
elif self.showObj and self.naming_pattern:
result.show = self.showObj
# confirm result show object variables
if result.show:
@@ -155,7 +151,8 @@ class NameParser(object):
if 'season_num' in named_groups:
tmp_season = int(match.group('season_num'))
if not (cur_regex_name == 'bare' and tmp_season in (19, 20)):
if cur_regex_name == 'bare' and tmp_season in (19, 20):
continue
result.season_number = tmp_season
result.score += 1
@@ -197,7 +194,7 @@ class NameParser(object):
result.sports_air_date = parser.parse(sports_air_date, fuzzy=True).date()
result.score += 1
except:
pass
continue
if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
if result.show and result.show.air_by_date:
@@ -210,14 +207,15 @@ class NameParser(object):
result.air_date = datetime.datetime.strptime(dtStr, "%Y-%m-%d").date()
result.score += 1
except:
pass
continue
if 'extra_info' in named_groups:
tmp_extra_info = match.group('extra_info')
# Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
if not (tmp_extra_info and 'season_only' in cur_regex_name and re.search(
r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I)):
if tmp_extra_info and cur_regex_name == 'season_only' and re.search(
r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
continue
result.extra_info = tmp_extra_info
result.score += 1
@@ -231,19 +229,48 @@ class NameParser(object):
# pick best match with highest score based on placement
bestResult = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)
# if no show object was created check and see if we passed one in and use that instead
if not bestResult.show and self.showObj:
bestResult.show = self.showObj
# get quality
bestResult.quality = common.Quality.nameQuality(name,
bestResult.show.is_anime if bestResult.show else False)
# if this is a naming pattern test or result doesn't have a show object then return best result
if not bestResult.show or bestResult.is_air_by_date or bestResult.is_sports or self.naming_pattern:
if not bestResult.show or self.naming_pattern:
return bestResult
new_episode_numbers = []
new_season_numbers = []
new_absolute_numbers = []
if bestResult.show.is_anime and len(bestResult.ab_episode_numbers):
# if we have an air-by-date show then get the real season/episode numbers
if bestResult.is_air_by_date or bestResult.is_sports:
airdate = bestResult.air_date.toordinal() if bestResult.air_date else bestResult.sports_air_date.toordinal()
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[bestResult.show.indexerid, bestResult.show.indexer, airdate])
if sql_result:
season_number = int(sql_result[0][0])
episode_numbers = [int(sql_result[0][1])]
for epNo in episode_numbers:
s = season_number
e = epNo
if self.convert:
(s, e) = scene_numbering.get_indexer_numbering(bestResult.show.indexerid,
bestResult.show.indexer,
season_number,
epNo)
new_episode_numbers.append(e)
new_season_numbers.append(s)
elif bestResult.show.is_anime and len(bestResult.ab_episode_numbers):
scene_season = scene_exceptions.get_scene_exception_by_name(bestResult.series_name)[1]
for epAbsNo in bestResult.ab_episode_numbers:
a = epAbsNo
@@ -252,13 +279,9 @@ class NameParser(object):
a = scene_numbering.get_indexer_absolute_numbering(bestResult.show.indexerid,
bestResult.show.indexer, epAbsNo,
True, scene_season)
try:
(s, e) = helpers.get_all_episodes_from_absolute_number(bestResult.show, [a])
except exceptions.EpisodeNotFoundByAbsoluteNumberException:
logger.log(str(bestResult.show.indexerid) + ": Indexer object absolute number " + str(
epAbsNo) + " is incomplete, skipping this episode")
return bestResult
else:
new_absolute_numbers.append(a)
new_episode_numbers.extend(e)
new_season_numbers.append(s)
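
This hunk is the core of the air-by-date change from the commit message: the parser itself now maps an air date (or sports air date) to real season/episode numbers via the tv_episodes table, so the callers stripped further down no longer need to. A standalone sketch of that lookup with SQLite and made-up sample data:

import datetime
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE tv_episodes (showid INTEGER, indexer INTEGER, season INTEGER, episode INTEGER, airdate INTEGER)")
# airdate is stored as an ordinal, matching air_date.toordinal() in the diff
conn.execute("INSERT INTO tv_episodes VALUES (101, 1, 2014, 32, ?)", (datetime.date(2014, 7, 24).toordinal(),))

def season_episode_from_airdate(showid, indexer, air_date):
    row = conn.execute(
        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
        (showid, indexer, air_date.toordinal())).fetchone()
    return (int(row[0]), [int(row[1])]) if row else (None, [])

print(season_episode_from_airdate(101, 1, datetime.date(2014, 7, 24)))  # -> (2014, [32])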

View File

@@ -135,10 +135,6 @@ class ProperFinder():
curProper.indexer = parse_result.show.indexer
# populate our Proper instance
if parse_result.is_air_by_date or parse_result.is_sports:
curProper.season = -1
curProper.episode = parse_result.air_date or parse_result.sports_air_date
else:
if parse_result.is_anime:
logger.log(u"I am sorry '"+curProper.name+"' seams to be an anime proper seach is not yet suported", logger.DEBUG)
continue
@@ -163,36 +159,6 @@ class ProperFinder():
logger.MESSAGE)
continue
# if we have an air-by-date show then get the real season/episode numbers
if (parse_result.is_air_by_date or parse_result.is_sports) and curProper.indexerid:
logger.log(
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = curProper.episode.toordinal()
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[curProper.indexerid, curProper.indexer, airdate])
if sql_result:
curProper.season = int(sql_result[0][0])
curProper.episodes = [int(sql_result[0][1])]
else:
logger.log(u"Unable to find episode with date " + str(
curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
continue
# check if we actually want this proper (if it's the right quality)
myDB = db.DBConnection()
sqlResults = myDB.select(
"SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[curProper.indexerid, curProper.season, curProper.episode])
if not sqlResults:
continue
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)

View File

@@ -86,7 +86,7 @@ class BitSoupProvider(generic.TorrentProvider):
self.session = requests.Session()
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False
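
The same one-line change repeats for FreshOnTV, HDTorrents, IPTorrents, TorrentBytes, TorrentDay and TorrentLeech below: the login POST gets verify=False so an expired or self-signed tracker certificate no longer aborts the login. A minimal sketch of that pattern with a placeholder URL and credentials; disabling verification trades TLS safety for availability:

import requests

LOGIN_URL = 'https://tracker.example/takelogin.php'  # placeholder
login_params = {'username': 'user', 'password': 'secret'}  # placeholder

session = requests.Session()
try:
    # verify=False skips certificate validation, mirroring the provider changes
    response = session.post(LOGIN_URL, data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
    print('Unable to connect to provider: %s' % e)
else:
    print(response.status_code)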

View File

@@ -96,7 +96,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
self.session = requests.Session()
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False

View File

@@ -99,7 +99,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
}
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False

View File

@@ -82,7 +82,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
}
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False

View File

@@ -163,11 +163,14 @@ class SpeedCDProvider(generic.TorrentProvider):
post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string},
**self.categories[mode])
data = self.session.post(self.urls['search'], data=post_data).json()
data = self.session.post(self.urls['search'], data=post_data, verify=False)
if not data:
continue
try:
# convert to json
data = data.json()
torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
except:
continue
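
The SpeedCD hunk is the "JSON issues" part of the commit message: instead of calling .json() on the raw POST result, the response is checked first and the decode is wrapped so an HTML error page or malformed payload just skips this search string. A standalone sketch of the pattern against a placeholder endpoint (the TorrentDay hunk below applies the same guard):

import requests

SEARCH_URL = 'https://tracker.example/browse.php'  # placeholder
post_data = {'jxt': 4, 'jxw': 'b', 'search': 'Some.Show.S01E01'}

session = requests.Session()
data = session.post(SEARCH_URL, data=post_data, verify=False)
torrents = []
if data:  # a requests.Response is falsy on HTTP error status codes
    try:
        payload = data.json()  # decode lazily; bad JSON is handled below
        torrents = payload.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
    except Exception:
        torrents = []
print(len(torrents))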

View File

@@ -86,7 +86,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
self.session = requests.Session()
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False

View File

@@ -95,7 +95,7 @@ class TorrentDayProvider(generic.TorrentProvider):
}
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False
@@ -194,9 +194,12 @@ class TorrentDayProvider(generic.TorrentProvider):
if self.freeleech:
post_data.update({'free': 'on'})
data = self.session.post(self.urls['search'], data=post_data).json()
data = self.session.post(self.urls['search'], data=post_data, verify=False)
if not data:
continue
try:
data = data.json()
torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
except:
continue

View File

@@ -88,7 +88,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
self.session = requests.Session()
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False

View File

@@ -612,7 +612,7 @@ class TVShow(object):
logger.log(str(self.indexerid) + u": Creating episode object from " + file, logger.DEBUG)
try:
myParser = NameParser(True, showObj=self, tryIndexers=True)
myParser = NameParser(showObj=self, tryIndexers=True)
parse_result = myParser.parse(file)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + file + " into a valid episode", logger.DEBUG)
@@ -621,7 +621,7 @@ class TVShow(object):
logger.log(u"Unable to parse the filename " + file + " into a valid show", logger.DEBUG)
return None
if not len(parse_result.episode_numbers) and not (parse_result.is_air_by_date or parse_result.is_sports):
if not len(parse_result.episode_numbers):
logger.log("parse_result: " + str(parse_result))
logger.log(u"No episode number found in " + file + ", ignoring it", logger.ERROR)
return None
@@ -631,25 +631,6 @@ class TVShow(object):
episodes = parse_result.episode_numbers
rootEp = None
# if we have an air-by-date show then get the real season/episode numbers
if parse_result.is_air_by_date or parse_result.is_sports:
logger.log(
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[self.indexerid, self.indexer, airdate])
if sql_result:
season = int(sql_result[0][0])
episodes = [int(sql_result[0][1])]
else:
logger.log(u"Unable to find episode with date " + str(
parse_result.air_date) + " for show " + self.name + ", skipping", logger.WARNING)
return None
sql_l = []
for curEpNum in episodes:

View File

@@ -18,8 +18,6 @@
from __future__ import with_statement
import os
import time
import datetime
import sickbeard
@@ -253,18 +251,6 @@ class TVCache():
return None
# if we made it this far then let's add the parsed result to cache for usage later on
season = episodes = None
if parse_result.is_air_by_date or parse_result.is_sports:
airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
myDB = db.DBConnection()
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
[parse_result.show.indexerid, parse_result.show.indexer, airdate])
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]
else:
season = parse_result.season_number if parse_result.season_number else 1
episodes = parse_result.episode_numbers
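
With the date conversion handled inside NameParser, the cache path reduces to trusting the parser's numbers and defaulting to season 1 when a release name only carries an episode number. A tiny sketch of that simplified logic using a stand-in parse result:

from collections import namedtuple

ParseResult = namedtuple('ParseResult', 'season_number episode_numbers')

def cache_numbers(parse_result):
    # Simplified cache path: take the parsed season, defaulting to 1.
    season = parse_result.season_number if parse_result.season_number else 1
    return season, parse_result.episode_numbers

print(cache_numbers(ParseResult(None, [3])))   # -> (1, [3])
print(cache_numbers(ParseResult(5, [1, 2])))   # -> (5, [1, 2])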