Mirror of https://github.com/moparisthebest/SickRage, synced 2024-12-12 11:02:21 -05:00
Fixed further JSON issues with the SpeedCD provider.
Fixed SSL certificate verification issues in several providers. Air-by-date shows now automatically get their respective season/episode numbers during release parsing.
This commit is contained in:
parent: b16ff81478
commit: f47734446d
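
Summary of the fix pattern: every provider request below gains an explicit verify argument so requests no longer aborts on trackers with self-signed certificates, and the JSON providers decode their responses lazily. A minimal sketch of the SSL part, assuming only the standard requests API (the URL and credentials are placeholders, not SickRage code):

    import requests

    def tracker_login(url, username, password, verify_cert=False):
        # verify=False disables certificate verification, matching the
        # verify=False / verify=sickbeard.TORRENT_VERIFY_CERT hunks below
        session = requests.Session()
        return session.post(url,
                            data={'username': username, 'password': password},
                            timeout=30,
                            verify=verify_cert)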
@@ -37,7 +37,7 @@ class TransmissionAPI(GenericClient):
         post_data = json.dumps({'method': 'session-get', })

         try:
-            self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
+            self.response = self.session.post(self.url, data=post_data.encode('utf-8'), verify=sickbeard.TORRENT_VERIFY_CERT)
             self.auth = re.search('X-Transmission-Session-Id:\s*(\w+)', self.response.text).group(1)
         except:
             return None
@@ -703,42 +703,38 @@ def update_anime_support():


 def get_absolute_number_from_season_and_episode(show, season, episode):
-    myDB = db.DBConnection()
-    sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
-    sqlResults = myDB.select(sql, [show.indexerid, season, episode])
+    absolute_number = None

-    if len(sqlResults) == 1:
-        absolute_number = int(sqlResults[0]["absolute_number"])
-        logger.log(
-            "Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode), logger.DEBUG)
+    if season and episode:
+        myDB = db.DBConnection()
+        sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
+        sqlResults = myDB.select(sql, [show.indexerid, season, episode])

-        return absolute_number
-    else:
-        logger.log(
-            "No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(episode),
-            logger.DEBUG)
-
-        return None
+        if len(sqlResults) == 1:
+            absolute_number = int(sqlResults[0]["absolute_number"])
+            logger.log(
+                "Found absolute_number:" + str(absolute_number) + " by " + str(season) + "x" + str(episode), logger.DEBUG)
+        else:
+            logger.log(
+                "No entries for absolute number in show: " + show.name + " found using " + str(season) + "x" + str(episode),
+                logger.DEBUG)
+
+    return absolute_number


 def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=None):
-    if len(absolute_numbers) == 0:
-        raise EpisodeNotFoundByAbsoluteNumberException
-
     episodes = []
     season = None

-    if not show and indexer_id:
-        show = findCertainShow(sickbeard.showList, indexer_id)
+    if len(absolute_numbers):
+        if not show and indexer_id:
+            show = findCertainShow(sickbeard.showList, indexer_id)

-    if show:
-        for absolute_number in absolute_numbers:
-            ep = show.getEpisode(None, None, absolute_number=absolute_number)
-            if ep:
-                episodes.append(ep.episode)
-            else:
-                raise EpisodeNotFoundByAbsoluteNumberException
-            season = ep.season  # this will always take the last found season, so eps that cross a season border are not handled well
+        if show:
+            for absolute_number in absolute_numbers:
+                ep = show.getEpisode(None, None, absolute_number=absolute_number)
+                if ep:
+                    episodes.append(ep.episode)
+                    season = ep.season  # this will always take the last found season, so eps that cross a season border are not handled well

     return (season, episodes)

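For context, a hypothetical call sequence for the two helpers rewritten above; `show` is assumed to be a loaded TVShow object:

    # The rewritten helper returns None both for a missing row and for falsy
    # season/episode inputs, instead of querying with bad values.
    absolute_number = get_absolute_number_from_season_and_episode(show, 1, 5)

    # The rewritten lookup no longer raises EpisodeNotFoundByAbsoluteNumberException
    # for an empty input list or a missing episode; it just returns what it found.
    season, episode_numbers = get_all_episodes_from_absolute_number(show, [12, 13])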
@@ -26,7 +26,7 @@ import os.path
 import regexes
 import sickbeard

-from sickbeard import logger, helpers, scene_numbering, common, exceptions, scene_exceptions, encodingKludge as ek
+from sickbeard import logger, helpers, scene_numbering, common, exceptions, scene_exceptions, encodingKludge as ek, db
 from dateutil import parser

@@ -89,14 +89,13 @@ class NameParser(object):

         self.compiled_regexes = []
         for regexItem in uncompiled_regex:
-            for i, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
+            for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
                 try:
                     cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
                 except re.error, errormsg:
                     logger.log(u"WARNING: Invalid episode_pattern, %s. %s" % (errormsg, cur_pattern))
                 else:
-                    cur_pattern_name = str(i) + "_" + cur_pattern_name
-                    self.compiled_regexes.append((regexMode, cur_pattern_name, cur_regex))
+                    self.compiled_regexes.append((regexMode, cur_pattern_num, cur_pattern_name, cur_regex))

     def _parse_string(self, name):
         if not name:
@@ -111,16 +110,15 @@ class NameParser(object):
                 break

         self._compile_regexes(regexMode)
-        for (cur_regexMode, cur_regex_name, cur_regex) in self.compiled_regexes:
+        for (cur_regexMode, cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes:
             match = cur_regex.match(name)

             if not match:
                 continue

-            regex_num = int(re.match('^\d{1,2}', cur_regex_name).group(0))
             result = ParseResult(name)
             result.which_regex = [cur_regex_name]
-            result.score = 0 - regex_num
+            result.score = 0 - cur_regex_num

             named_groups = match.groupdict().keys()

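The scoring change drops the numeric prefix that _compile_regexes used to bake into each pattern name and instead carries the enumerate index through the compiled tuple. A minimal sketch of the idea, with illustrative pattern names and bodies:

    import re

    uncompiled = [('standard', r'(?P<season_num>\d+)x(?P<ep_num>\d+)'),
                  ('bare', r'(?P<season_num>\d{1,2})(?P<ep_num>\d{2})')]
    compiled = [(num, name, re.compile(pat, re.VERBOSE | re.IGNORECASE))
                for num, (name, pat) in enumerate(uncompiled)]

    for cur_regex_num, cur_regex_name, cur_regex in compiled:
        score = 0 - cur_regex_num   # earlier patterns keep the higher base score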
@@ -133,8 +131,6 @@ class NameParser(object):
             # get show object
             if not result.show and not self.naming_pattern:
                 result.show = helpers.get_show(result.series_name, self.tryIndexers)
-            elif self.showObj and self.naming_pattern:
-                result.show = self.showObj

             # confirm result show object variables
             if result.show:
@@ -155,9 +151,10 @@ class NameParser(object):

             if 'season_num' in named_groups:
                 tmp_season = int(match.group('season_num'))
-                if not (cur_regex_name == 'bare' and tmp_season in (19, 20)):
-                    result.season_number = tmp_season
-                    result.score += 1
+                if cur_regex_name == 'bare' and tmp_season in (19, 20):
+                    continue
+                result.season_number = tmp_season
+                result.score += 1

             if 'ep_num' in named_groups:
                 ep_num = self._convert_number(match.group('ep_num'))
@@ -166,7 +163,7 @@ class NameParser(object):
                     result.score += 1
                 else:
                     result.episode_numbers = [ep_num]
-                    result.score += 1
+                result.score += 1

             if 'ep_ab_num' in named_groups:
                 ep_ab_num = self._convert_number(match.group('ep_ab_num'))
@@ -176,7 +173,7 @@ class NameParser(object):
                     result.score += 1
                 else:
                     result.ab_episode_numbers = [ep_ab_num]
-                    result.score += 1
+                result.score += 1

             if 'sports_event_id' in named_groups:
                 sports_event_id = match.group('sports_event_id')
@@ -197,7 +194,7 @@ class NameParser(object):
                         result.sports_air_date = parser.parse(sports_air_date, fuzzy=True).date()
                         result.score += 1
                     except:
-                        pass
+                        continue

             if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                 if result.show and result.show.air_by_date:
@@ -210,16 +207,17 @@ class NameParser(object):
                     result.air_date = datetime.datetime.strptime(dtStr, "%Y-%m-%d").date()
                     result.score += 1
                 except:
-                    pass
+                    continue

             if 'extra_info' in named_groups:
                 tmp_extra_info = match.group('extra_info')

                 # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
-                if not (tmp_extra_info and 'season_only' in cur_regex_name and re.search(
-                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I)):
-                    result.extra_info = tmp_extra_info
-                    result.score += 1
+                if tmp_extra_info and cur_regex_name == 'season_only' and re.search(
+                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
+                    continue
+                result.extra_info = tmp_extra_info
+                result.score += 1

             if 'release_group' in named_groups:
                 result.release_group = match.group('release_group')
@@ -231,19 +229,48 @@ class NameParser(object):
         # pick best match with highest score based on placement
         bestResult = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)

+        # if no show object was created check and see if we passed one in and use that instead
+        if not bestResult.show and self.showObj:
+            bestResult.show = self.showObj
+
         # get quality
         bestResult.quality = common.Quality.nameQuality(name,
                                                         bestResult.show.is_anime if bestResult.show else False)

         # if this is a naming pattern test or result doesn't have a show object then return best result
-        if not bestResult.show or bestResult.is_air_by_date or bestResult.is_sports or self.naming_pattern:
+        if not bestResult.show or self.naming_pattern:
             return bestResult

         new_episode_numbers = []
         new_season_numbers = []
         new_absolute_numbers = []

-        if bestResult.show.is_anime and len(bestResult.ab_episode_numbers):
+        # if we have an air-by-date show then get the real season/episode numbers
+        if bestResult.is_air_by_date or bestResult.is_sports:
+            airdate = bestResult.air_date.toordinal() if bestResult.air_date else bestResult.sports_air_date.toordinal()
+
+            myDB = db.DBConnection()
+            sql_result = myDB.select(
+                "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
+                [bestResult.show.indexerid, bestResult.show.indexer, airdate])
+
+            if sql_result:
+                season_number = int(sql_result[0][0])
+                episode_numbers = [int(sql_result[0][1])]
+
+                for epNo in episode_numbers:
+                    s = season_number
+                    e = epNo
+
+                    if self.convert:
+                        (s, e) = scene_numbering.get_indexer_numbering(bestResult.show.indexerid,
+                                                                       bestResult.show.indexer,
+                                                                       season_number,
+                                                                       epNo)
+                    new_episode_numbers.append(e)
+                    new_season_numbers.append(s)
+
+        elif bestResult.show.is_anime and len(bestResult.ab_episode_numbers):
             scene_season = scene_exceptions.get_scene_exception_by_name(bestResult.series_name)[1]
             for epAbsNo in bestResult.ab_episode_numbers:
                 a = epAbsNo
@@ -252,16 +279,12 @@ class NameParser(object):
                     a = scene_numbering.get_indexer_absolute_numbering(bestResult.show.indexerid,
                                                                        bestResult.show.indexer, epAbsNo,
                                                                        True, scene_season)
-                try:
-                    (s, e) = helpers.get_all_episodes_from_absolute_number(bestResult.show, [a])
-                except exceptions.EpisodeNotFoundByAbsoluteNumberException:
-                    logger.log(str(bestResult.show.indexerid) + ": Indexer object absolute number " + str(
-                        epAbsNo) + " is incomplete, skipping this episode")
-                    return bestResult
-                else:
-                    new_absolute_numbers.append(a)
-                    new_episode_numbers.extend(e)
-                    new_season_numbers.append(s)
+
+                (s, e) = helpers.get_all_episodes_from_absolute_number(bestResult.show, [a])
+
+                new_absolute_numbers.append(a)
+                new_episode_numbers.extend(e)
+                new_season_numbers.append(s)

         elif bestResult.season_number and len(bestResult.episode_numbers):
             for epNo in bestResult.episode_numbers:
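The new air-by-date branch keys tv_episodes.airdate on the proleptic Gregorian ordinal of the parsed date, as the SQL in the hunk above shows:

    import datetime

    air_date = datetime.date(2014, 5, 26)
    print(air_date.toordinal())   # 735379 -- the integer compared against the airdate column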
@@ -135,16 +135,12 @@ class ProperFinder():
             curProper.indexer = parse_result.show.indexer

             # populate our Proper instance
-            if parse_result.is_air_by_date or parse_result.is_sports:
-                curProper.season = -1
-                curProper.episode = parse_result.air_date or parse_result.sports_air_date
+            if parse_result.is_anime:
+                logger.log(u"I am sorry '"+curProper.name+"' seems to be an anime, proper search is not yet supported", logger.DEBUG)
+                continue
             else:
-                if parse_result.is_anime:
-                    logger.log(u"I am sorry '"+curProper.name+"' seems to be an anime, proper search is not yet supported", logger.DEBUG)
-                    continue
-                else:
-                    curProper.season = parse_result.season_number if parse_result.season_number != None else 1
-                    curProper.episode = parse_result.episode_numbers[0]
+                curProper.season = parse_result.season_number if parse_result.season_number != None else 1
+                curProper.episode = parse_result.episode_numbers[0]

             curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
@@ -163,36 +159,6 @@ class ProperFinder():
                               logger.MESSAGE)
                 continue

-            # if we have an air-by-date show then get the real season/episode numbers
-            if (parse_result.is_air_by_date or parse_result.is_sports) and curProper.indexerid:
-                logger.log(
-                    u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
-                    logger.DEBUG)
-
-                airdate = curProper.episode.toordinal()
-
-                myDB = db.DBConnection()
-                sql_result = myDB.select(
-                    "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
-                    [curProper.indexerid, curProper.indexer, airdate])
-
-                if sql_result:
-                    curProper.season = int(sql_result[0][0])
-                    curProper.episodes = [int(sql_result[0][1])]
-                else:
-                    logger.log(u"Unable to find episode with date " + str(
-                        curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
-                    continue
-
             # check if we actually want this proper (if it's the right quality)
             myDB = db.DBConnection()
             sqlResults = myDB.select(
                 "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                 [curProper.indexerid, curProper.season, curProper.episode])

             if not sqlResults:
                 continue

             oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

             # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
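Because the parser now resolves air-by-date and sports releases to real season/episode numbers (see the NameParser hunks above), both the season = -1 placeholder and this date-to-episode SQL block drop out of ProperFinder. Illustratively, a parsed proper can now be assumed to carry numeric identifiers:

    season_number = 5      # stand-in for parse_result.season_number
    episode_numbers = [3]  # stand-in for parse_result.episode_numbers

    cur_season = season_number if season_number is not None else 1
    cur_episode = episode_numbers[0]   # an int, never a datetime.date as before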
@@ -86,7 +86,7 @@ class BitSoupProvider(generic.TorrentProvider):
         self.session = requests.Session()

         try:
-            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
@@ -166,7 +166,7 @@ class BitSoupProvider(generic.TorrentProvider):
                 data = self.getURL(searchURL)
                 if not data:
                     continue

                 try:
                     with BS4Parser(data, "html.parser") as html:
                         torrent_table = html.find('table', attrs={'class': 'koptekst'})
@@ -96,7 +96,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
         self.session = requests.Session()

         try:
-            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
@@ -99,7 +99,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
         }

         try:
-            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
@@ -82,7 +82,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
         }

         try:
-            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
@@ -163,11 +163,14 @@ class SpeedCDProvider(generic.TorrentProvider):
                 post_data = dict({'/browse.php?': None, 'cata': 'yes', 'jxt': 4, 'jxw': 'b', 'search': search_string},
                                  **self.categories[mode])

-                data = self.session.post(self.urls['search'], data=post_data).json()
+                data = self.session.post(self.urls['search'], data=post_data, verify=False)
                 if not data:
                     continue

                 try:
+                    # convert to json
+                    data = data.json()
+
                     torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
                 except:
                     continue
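The JSON fix defers decoding: post first, bail out on an empty or failed response, then call .json() inside the existing try/except so a malformed body just skips this search pass. A sketch of the pattern with an illustrative URL (requests.Response is falsy for 4xx/5xx statuses):

    import requests

    def fetch_torrents(session, url, post_data):
        data = session.post(url, data=post_data, verify=False)
        if not data:       # empty or error response: nothing to decode
            return []
        try:
            return data.json().get('Fs', [])[0].get('Cn', {}).get('torrents', [])
        except Exception:  # malformed JSON or an unexpected shape
            return []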
@@ -86,7 +86,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
         self.session = requests.Session()

         try:
-            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
@@ -95,7 +95,7 @@ class TorrentDayProvider(generic.TorrentProvider):
         }

         try:
-            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
@@ -194,9 +194,12 @@ class TorrentDayProvider(generic.TorrentProvider):
                 if self.freeleech:
                     post_data.update({'free': 'on'})

-                data = self.session.post(self.urls['search'], data=post_data).json()
+                data = self.session.post(self.urls['search'], data=post_data, verify=False)
+                if not data:
+                    continue

                 try:
+                    data = data.json()
                     torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
                 except:
                     continue
@@ -88,7 +88,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
         self.session = requests.Session()

         try:
-            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
             logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
             return False
@@ -612,7 +612,7 @@ class TVShow(object):
         logger.log(str(self.indexerid) + u": Creating episode object from " + file, logger.DEBUG)

         try:
-            myParser = NameParser(True, showObj=self, tryIndexers=True)
+            myParser = NameParser(showObj=self, tryIndexers=True)
             parse_result = myParser.parse(file)
         except InvalidNameException:
             logger.log(u"Unable to parse the filename " + file + " into a valid episode", logger.DEBUG)
@@ -621,7 +621,7 @@ class TVShow(object):
             logger.log(u"Unable to parse the filename " + file + " into a valid show", logger.DEBUG)
             return None

-        if not len(parse_result.episode_numbers) and not (parse_result.is_air_by_date or parse_result.is_sports):
+        if not len(parse_result.episode_numbers):
             logger.log("parse_result: " + str(parse_result))
             logger.log(u"No episode number found in " + file + ", ignoring it", logger.ERROR)
             return None
@@ -631,25 +631,6 @@ class TVShow(object):
         episodes = parse_result.episode_numbers
         rootEp = None

-        # if we have an air-by-date show then get the real season/episode numbers
-        if parse_result.is_air_by_date or parse_result.is_sports:
-            logger.log(
-                u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
-                logger.DEBUG)
-            airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
-            myDB = db.DBConnection()
-            sql_result = myDB.select(
-                "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
-                [self.indexerid, self.indexer, airdate])
-
-            if sql_result:
-                season = int(sql_result[0][0])
-                episodes = [int(sql_result[0][1])]
-            else:
-                logger.log(u"Unable to find episode with date " + str(
-                    parse_result.air_date) + " for show " + self.name + ", skipping", logger.WARNING)
-                return None
-
         sql_l = []
         for curEpNum in episodes:
@@ -18,8 +18,6 @@

 from __future__ import with_statement

-import os
-
 import time
 import datetime
 import sickbeard
@@ -253,20 +251,8 @@ class TVCache():
             return None

         # if we made it this far then let's add the parsed result to cache for usage later on
-        season = episodes = None
-        if parse_result.is_air_by_date or parse_result.is_sports:
-            airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
-
-            myDB = db.DBConnection()
-            sql_results = myDB.select(
-                "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
-                [parse_result.show.indexerid, parse_result.show.indexer, airdate])
-            if sql_results > 0:
-                season = int(sql_results[0]["season"])
-                episodes = [int(sql_results[0]["episode"])]
-        else:
-            season = parse_result.season_number if parse_result.season_number else 1
-            episodes = parse_result.episode_numbers
+        season = parse_result.season_number if parse_result.season_number else 1
+        episodes = parse_result.episode_numbers

         if season and episodes:
             # store episodes as a separated string
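Same simplification as in TVShow and ProperFinder: the cache now trusts the parse result, since the parser resolves air-by-date numbering itself. The removed branch also carried a latent bug: in Python 2, `if sql_results > 0:` compares a list against an int and is always true. The surviving logic, sketched with stand-in values:

    season_number = None      # stand-in for parse_result.season_number
    episode_numbers = [7, 8]  # stand-in for parse_result.episode_numbers

    season = season_number if season_number else 1   # falls back to season 1
    episodes = episode_numbers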