From a5b72dea8424420e81392660fd0a04e811015e9e Mon Sep 17 00:00:00 2001 From: echel0n Date: Sun, 4 May 2014 20:04:46 -0700 Subject: [PATCH] Fixes issues with findpropers and airdate. Fixes issues with unhashable dict types. Fixes issues with 100% CPU usage. Fixes issues with RSS feed cache. Fixes issues with scene numbering and converting back to indexer numbering. Fixes issues with backlog searches. Fixes issues with season pack searches. Fixes issues with BTN. --- sickbeard/__init__.py | 3 -- sickbeard/helpers.py | 24 ++++------ sickbeard/name_cache.py | 7 +++ sickbeard/postProcessor.py | 7 ++- sickbeard/properFinder.py | 14 ++---- sickbeard/providers/btn.py | 1 - sickbeard/providers/dtt.py | 2 +- sickbeard/providers/ezrss.py | 4 +- sickbeard/providers/generic.py | 6 +-- sickbeard/providers/hdbits.py | 4 +- sickbeard/providers/newzbin.py | 2 +- sickbeard/providers/newznab.py | 2 +- sickbeard/providers/nyaatorrents.py | 2 +- sickbeard/providers/rsstorrent.py | 2 +- sickbeard/search.py | 3 ++ sickbeard/search_queue.py | 50 ++++++++++++++------ sickbeard/tv.py | 71 ++++++++--------------------- sickbeard/tvcache.py | 20 ++++---- tests/xem_tests.py | 1 + 19 files changed, 104 insertions(+), 121 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index cb5c19b7..e25536d2 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -991,9 +991,6 @@ def initialize(consoleLogging=True): newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA) providerList = providers.makeProviderList() - # fix scene numbering in mainDB - scene_numbering.fix_scene_numbering() - # initialize newznab providers currentSearchScheduler = scheduler.Scheduler(searchCurrent.CurrentSearcher(), cycleTime=datetime.timedelta(minutes=SEARCH_FREQUENCY), diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 2bd5908e..fe131d53 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -946,33 +946,25 @@ def _check_against_names(name, show): def get_show_by_name(name): showObj = None - in_cache = False if not sickbeard.showList: return indexerid = sickbeard.name_cache.retrieveNameFromCache(name) - if indexerid: - in_cache = True showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name))) - for showName in showNames if not in_cache else []: - try: - showObj = [x for x in sickbeard.showList if _check_against_names(showName, x)][0] - indexerid = showObj.indexerid - except: - indexerid = 0 - - if indexerid: + for showName in showNames if not indexerid else []: + sceneResults = [x for x in sickbeard.showList if _check_against_names(showName, x)] + showObj = sceneResults[0] if len(sceneResults) else None + if showObj: break - if indexerid: + if showObj or indexerid: logger.log(u"Found Indexer ID:[" + repr(indexerid) + "], using that for [" + str(name) + "}",logger.DEBUG) if not showObj: - showObjList = [x for x in sickbeard.showList if x.indexerid == indexerid] - if len(showObjList): - showObj = showObjList[0] - return showObj + showObj = findCertainShow(sickbeard.showList, int(indexerid)) + + return showObj def is_hidden_folder(folder): """ diff --git a/sickbeard/name_cache.py b/sickbeard/name_cache.py index 95109b1c..345e3274 100644 --- a/sickbeard/name_cache.py +++ b/sickbeard/name_cache.py @@ -16,6 +16,8 @@ # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see . 
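For reference, the reworked get_show_by_name() above boils down to: consult the name cache first, only walk the scene-name variants when no cached indexer id exists, and finally resolve a cached id against the show list. A minimal standalone sketch of that lookup order (ShowStub, NAME_CACHE and SHOW_LIST are illustrative stand-ins for SickBeard's show objects, name cache and showList, not its real API):

    class ShowStub(object):
        def __init__(self, indexerid, names):
            self.indexerid = indexerid
            self.names = [n.lower() for n in names]

        def matches(self, name):
            return name.lower() in self.names

    NAME_CACHE = {'Example Show': 12345}             # release/show name -> indexer id
    SHOW_LIST = [ShowStub(12345, ['Example Show'])]  # stand-in for sickbeard.showList

    def get_show_by_name(name, scene_variants):
        indexerid = NAME_CACHE.get(name)
        show_obj = None

        # only scan the scene-name variants when the cache had nothing for us
        for variant in (scene_variants if not indexerid else []):
            matches = [s for s in SHOW_LIST if s.matches(variant)]
            show_obj = matches[0] if matches else None
            if show_obj:
                break

        # fall back to resolving a cached indexer id against the show list
        if not show_obj and indexerid:
            show_obj = next((s for s in SHOW_LIST if s.indexerid == int(indexerid)), None)

        return show_obj

    print(get_show_by_name('Example Show', ['Example Show']).indexerid)   # 12345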
+import sickbeard + from sickbeard import db from sickbeard.helpers import sanitizeSceneName @@ -52,6 +54,11 @@ def retrieveNameFromCache(name): if cache_results: return int(cache_results[0]["indexer_id"]) +def retrieveShowFromCache(name): + indexerid = retrieveNameFromCache(name) + if indexerid: + return sickbeard.helpers.findCertainShow(sickbeard.showList, int(indexerid)) + def clearCache(): """ Deletes all "unknown" entries from the cache (names with indexer_id of 0). diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py index 42f6e02d..20aad248 100644 --- a/sickbeard/postProcessor.py +++ b/sickbeard/postProcessor.py @@ -623,10 +623,13 @@ class PostProcessor(object): for cur_episode in episodes: self._log(u"Retrieving episode object for " + str(season) + "x" + str(cur_episode), logger.DEBUG) + # detect and convert scene numbered releases + season, cur_episode = sickbeard.scene_numbering.get_indexer_numbering(indexer_id,indexer,season,cur_episode) + self._log(u"Episode object has been scene converted to " + str(season) + "x" + str(cur_episode), logger.DEBUG) + # now that we've figured out which episode this file is just load it manually try: - # convert scene numbered release and load episode from database - curEp = show_obj.getEpisode(season, cur_episode, scene=True) + curEp = show_obj.getEpisode(season, cur_episode) except exceptions.EpisodeNotFoundException, e: self._log(u"Unable to create episode: " + ex(e), logger.DEBUG) raise exceptions.PostProcessingFailed() diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index f8f76cd6..696c4e54 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -112,9 +112,9 @@ class ProperFinder(): continue # populate our Proper instance - if parse_result.air_by_date: + if parse_result.air_by_date or parse_result.sports: curProper.season = -1 - curProper.episode = parse_result.air_date + curProper.episode = parse_result.air_date or parse_result.sports_event_date else: curProper.scene_season = parse_result.season_number if parse_result.season_number != None else 1 curProper.scene_episode = parse_result.episode_numbers[0] @@ -178,15 +178,11 @@ class ProperFinder(): continue # if we have an air-by-date show then get the real season/episode numbers - if curProper.season == -1 and curProper.indexerid and curProper.indexer: + if (parse_result.air_by_date or parse_result.sports_event_date) and curProper.indexerid: logger.log( u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode", logger.DEBUG) - if curProper.airdate: - airdate = curProper.airdate.toordinal() - else: - airdate = None - + airdate = curProper.episode.toordinal() myDB = db.DBConnection() sql_result = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? 
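The post-processing hunk above now converts a scene season/episode pair back to indexer numbering before the episode object is loaded. A rough sketch of what such a lookup does, with a plain dict standing in for the scene_numbering mapping (the real get_indexer_numbering() also takes the indexer and reads the cache database, so treat this as an assumption-laden simplification):

    # (indexer_id, scene_season, scene_episode) -> (indexer_season, indexer_episode)
    # purely illustrative data, not a real mapping
    SCENE_TO_INDEXER = {
        (12345, 5, 1): (4, 25),
    }

    def get_indexer_numbering(indexer_id, scene_season, scene_episode):
        # when no mapping exists, fall back to the scene numbers themselves,
        # which matches how un-mapped shows are treated
        return SCENE_TO_INDEXER.get((indexer_id, scene_season, scene_episode),
                                    (scene_season, scene_episode))

    season, episode = get_indexer_numbering(12345, 5, 1)
    print(season, episode)   # 4 25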
and airdate = ?", @@ -263,7 +259,7 @@ class ProperFinder(): showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid) if showObj == None: logger.log(u"Unable to find the show with indexerid " + str( - curProper.indexerid) + " so unable to download the proper", logger.ERROR) + curProper .indexerid) + " so unable to download the proper", logger.ERROR) continue epObj = showObj.getEpisode(curProper.season, curProper.episode) diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 184b4b02..3a2ce71f 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -117,7 +117,6 @@ class BTNProvider(generic.TorrentProvider): found_torrents.update(parsedJSON['torrents']) results = [] - for torrentid, torrent_info in found_torrents.iteritems(): (title, url) = self._get_title_and_url(torrent_info) diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py index 6fcbafb2..93bb967b 100644 --- a/sickbeard/providers/dtt.py +++ b/sickbeard/providers/dtt.py @@ -130,7 +130,7 @@ class DTTCache(tvcache.TVCache): def _parseItem(self, item): title, url = self.provider._get_title_and_url(item) - logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) + logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py index 58fcdf46..aebf0606 100644 --- a/sickbeard/providers/ezrss.py +++ b/sickbeard/providers/ezrss.py @@ -130,7 +130,7 @@ class EZRSSProvider(generic.TorrentProvider): (title, url) = self._get_title_and_url(curItem) if title and url: - logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG) + logger.log(u"Adding item from [" + self.name + "] RSS feed to cache: " + title, logger.DEBUG) results.append(curItem) else: logger.log( @@ -180,7 +180,7 @@ class EZRSSCache(tvcache.TVCache): (title, url) = self.provider._get_title_and_url(item) if title and url: - logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) + logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG) url = self._translateLinkURL(url) return self._addCacheEntry(title, url) diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index a5f22a13..6facb54e 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -250,15 +250,13 @@ class GenericProvider: searchStrings += self._get_episode_search_strings(epObj) # remove duplicate search strings - if len(searchStrings): - searchStrings = [i for n, i in enumerate(searchStrings) if i not in searchStrings[n + 1:]] + searchStrings = [i for n, i in enumerate(searchStrings) if i not in searchStrings[n + 1:]] for curString in sorted(searchStrings): itemList += self._doSearch(curString) # remove duplicate items - if len(itemList): - itemList = list(set(itemList)) + itemList = [i for n, i in enumerate(itemList) if i not in itemList[n + 1:]] for item in itemList: diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index e33f3165..d863e8f9 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -83,7 +83,9 @@ class HDBitsProvider(generic.TorrentProvider): season = ep_obj.scene_season episode = ep_obj.scene_episode - self.cache.updateCache() + if manualSearch: + self.cache.updateCache() + results = self.cache.searchCache(episode, manualSearch) logger.log(u"Cache results: " + str(results), logger.DEBUG) diff --git 
a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py index fb5a0cdb..9c0c28d2 100644 --- a/sickbeard/providers/newzbin.py +++ b/sickbeard/providers/newzbin.py @@ -356,7 +356,7 @@ class NewzbinCache(tvcache.TVCache): logger.log("Found quality " + str(quality), logger.DEBUG) - logger.log("Adding item from RSS to cache: " + title, logger.DEBUG) + logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG) self._addCacheEntry(title, url, quality=quality) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index cdd2d337..cf59ee03 100644 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -209,7 +209,7 @@ class NewznabProvider(generic.NZBProvider): (title, url) = self._get_title_and_url(curItem) if title and url: - logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG) + logger.log(u"Adding item from [" + self.name + "] RSS feed to cache: " + title,logger.DEBUG) results.append(curItem) else: logger.log( diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index e9cfd77d..0d02a4f9 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -143,7 +143,7 @@ class NyaaCache(tvcache.TVCache): logger.ERROR) return None - logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) + logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index 1c0f2bc5..47d9693a 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -164,5 +164,5 @@ class TorrentRssCache(tvcache.TVCache): logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR) return None - logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG) + logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) \ No newline at end of file diff --git a/sickbeard/search.py b/sickbeard/search.py index 38b90fa4..54785f3f 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -373,6 +373,9 @@ def searchProviders(show, season, episodes, seasonSearch=False, manualSearch=Fal if not curProvider.isActive(): continue + if manualSearch: + curProvider.cache.updateCache() + try: curResults = curProvider.findSearchResults(show, season, episodes, seasonSearch, manualSearch) except exceptions.AuthException, e: diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py index 9cf38961..76e3c81b 100644 --- a/sickbeard/search_queue.py +++ b/sickbeard/search_queue.py @@ -29,6 +29,7 @@ from sickbeard import ui BACKLOG_SEARCH = 10 RSS_SEARCH = 20 +FAILED_SEARCH = 30 MANUAL_SEARCH = 30 @@ -47,8 +48,6 @@ class SearchQueue(generic_queue.GenericQueue): for cur_item in self.queue: if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj: return True - if isinstance(cur_item, BacklogQueueItem) and cur_item.ep_obj == ep_obj: - return True return False def pause_backlog(self): @@ -92,7 +91,11 @@ class ManualSearchQueueItem(generic_queue.QueueItem): def execute(self): generic_queue.QueueItem.execute(self) - logger.log("Beginning manual search for " + self.ep_obj.prettyName()) + # convert indexer numbering to scene numbering for searches + (self.ep_obj.scene_season, self.ep_obj.scene_episode) = 
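The hdbits.py hunk above and the search.py change both gate cache refreshes on manualSearch, so scheduled backlog/RSS runs reuse the cache the RSS updater already maintains instead of re-hitting every provider. A condensed sketch of that pattern under assumed, simplified provider and cache classes:

    import time

    def fetch_rss_items():
        # placeholder for the provider's real RSS fetch
        return [{'title': 'Example.Show.S02E03.720p'}]

    class ProviderCache(object):
        def __init__(self, min_minutes=10):
            self.min_minutes = min_minutes
            self.last_update = 0.0
            self.items = []

        def should_update(self):
            # honour the per-provider minimum interval between refreshes
            return (time.time() - self.last_update) >= self.min_minutes * 60

        def update(self):
            if self.should_update():
                self.items = fetch_rss_items()
                self.last_update = time.time()

    def search_cache(cache, episode_tag, manual_search=False):
        # only force a refresh when the user explicitly asked for a manual search
        if manual_search:
            cache.update()
        return [i for i in cache.items if episode_tag in i['title']]

    print(search_cache(ProviderCache(), 'S02E03', manual_search=True))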
sickbeard.scene_numbering.get_scene_numbering( + self.ep_obj.show.indexerid, self.ep_obj.show.indexer, self.ep_obj.season, self.ep_obj.episode) + + logger.log("Beginning manual search for " + self.ep_obj.prettyName() + ' as ' + self.ep_obj.prettySceneName()) foundResults = search.searchProviders(self.ep_obj.show, self.ep_obj.season, [self.ep_obj], manualSearch=True) result = False @@ -134,7 +137,7 @@ class RSSSearchQueueItem(generic_queue.QueueItem): self._changeMissingEpisodes() - logger.log(u"Beginning search for new episodes on RSS") + logger.log(u"Beginning search for new episodes on RSS feeds and in cache") foundResults = search.searchForNeededEpisodes() @@ -188,7 +191,6 @@ class BacklogQueueItem(generic_queue.QueueItem): self.show = show self.segment = segment self.wantedEpisodes = [] - self.seasonSearch = False logger.log(u"Seeing if we need any episodes from " + self.show.name + " season " + str(self.segment)) @@ -215,16 +217,29 @@ class BacklogQueueItem(generic_queue.QueueItem): anyQualities, bestQualities = common.Quality.splitQuality(self.show.quality) #@UnusedVariable self.wantedEpisodes = self._need_any_episodes(statusResults, bestQualities) - # check if we want to search for season packs instead of just season/episode - seasonEps = show.getAllEpisodes(self.segment) - if len(seasonEps) == len(self.wantedEpisodes): - self.seasonSearch = True - def execute(self): generic_queue.QueueItem.execute(self) - results = search.searchProviders(self.show, self.segment, self.wantedEpisodes, seasonSearch=self.seasonSearch) + # check if we want to search for season packs instead of just season/episode + seasonSearch = False + seasonEps = self.show.getAllEpisodes(self.segment) + if len(seasonEps) == len(self.wantedEpisodes): + seasonSearch = True + + # convert indexer numbering to scene numbering for searches + for i, epObj in enumerate(self.wantedEpisodes): + (self.wantedEpisodes[i].scene_season, + self.wantedEpisodes[i].scene_episode) = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid, + self.show.indexer, + epObj.season, + epObj.episode) + logger.log( + "Beginning backlog search for " + self.wantedEpisodes[i].prettyName() + ' as ' + self.wantedEpisodes[ + i].prettySceneName()) + + # search for our wanted items and return the results + results = search.searchProviders(self.show, self.segment, self.wantedEpisodes, seasonSearch=seasonSearch) # download whatever we find for curResult in results: @@ -250,7 +265,7 @@ class BacklogQueueItem(generic_queue.QueueItem): # if we need a better one then say yes if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED: - epObj = self.show.getEpisode(self.segment,episode) + epObj = self.show.getEpisode(self.segment, episode) wantedEpisodes.append(epObj) return wantedEpisodes @@ -258,7 +273,7 @@ class BacklogQueueItem(generic_queue.QueueItem): class FailedQueueItem(generic_queue.QueueItem): def __init__(self, show, episodes): - generic_queue.QueueItem.__init__(self, 'Retry', MANUAL_SEARCH) + generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH) self.priority = generic_queue.QueuePriorities.HIGH self.thread_name = 'RETRY-' + str(show.indexerid) @@ -272,7 +287,14 @@ class FailedQueueItem(generic_queue.QueueItem): episodes = [] - for epObj in episodes: + for i, epObj in enumerate(episodes): + # convert indexer numbering to scene numbering for searches + (episodes[i].scene_season, self.episodes[i].scene_episode) = 
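The BacklogQueueItem change above moves the season-pack decision into execute(): a whole-season search is only attempted when every episode of the season is wanted, otherwise individual episodes are searched. A compact sketch of that decision, with plain lists standing in for episode objects:

    def should_do_season_search(all_season_episodes, wanted_episodes):
        # a season pack only makes sense when we want the entire season
        return len(all_season_episodes) == len(wanted_episodes)

    season_eps = ['S02E01', 'S02E02', 'S02E03']
    wanted = ['S02E01', 'S02E02', 'S02E03']
    print(should_do_season_search(season_eps, wanted))       # True  -> search the season pack
    print(should_do_season_search(season_eps, wanted[:1]))   # False -> per-episode searches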
sickbeard.scene_numbering.get_scene_numbering( + self.show.indexerid, self.show.indexer, epObj.season, epObj.episode) + + logger.log( + "Beginning failed download search for " + epObj.prettyName() + ' as ' + epObj.prettySceneName()) + (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode) if release: logger.log(u"Marking release as bad: " + release) diff --git a/sickbeard/tv.py b/sickbeard/tv.py index c8a08637..c894f6be 100644 --- a/sickbeard/tv.py +++ b/sickbeard/tv.py @@ -182,7 +182,7 @@ class TVShow(object): return ep_list - def getEpisode(self, season, episode, file=None, noCreate=False, scene=False): + def getEpisode(self, season, episode, file=None, noCreate=False): if not season in self.episodes: self.episodes[season] = {} @@ -197,15 +197,9 @@ class TVShow(object): episode) + " didn't exist in the cache, trying to create it", logger.DEBUG) if file != None: - if scene: - ep = TVEpisode(self, scene_season=season, scene_episode=episode, file=file) - else: - ep = TVEpisode(self, season, episode, file) + ep = TVEpisode(self, season, episode, file) else: - if scene: - ep = TVEpisode(self, scene_season=season, scene_episode=episode) - else: - ep = TVEpisode(self, season, episode, file) + ep = TVEpisode(self, season, episode, file) if ep != None: self.episodes[season][episode] = ep @@ -529,11 +523,11 @@ class TVShow(object): rootEp = None # if we have an air-by-date show then get the real season/episode numbers - if parse_result.air_by_date: + if parse_result.air_by_date or parse_result.sports: logger.log( u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode", logger.DEBUG) - airdate = parse_result.air_date.toordinal() + airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal() myDB = db.DBConnection() sql_result = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? 
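The tv.py hunk above resolves air-by-date and sports releases to real season/episode numbers by matching the parsed date's ordinal against tv_episodes.airdate. A self-contained sqlite3 sketch of that lookup (table trimmed to the relevant columns); note that picking whichever date the parser produced is safest written as (air_date or sports_event_date).toordinal(), since chaining x.toordinal() or y.toordinal() raises AttributeError when the first date is missing:

    import datetime
    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE tv_episodes (showid INT, indexer INT, season INT, episode INT, airdate INT)")
    conn.execute("INSERT INTO tv_episodes VALUES (12345, 1, 3, 7, ?)",
                 [datetime.date(2014, 5, 4).toordinal()])

    def episode_for_airdate(showid, indexer, air_date, sports_event_date=None):
        # use whichever date the parser produced, then convert once
        airdate = (air_date or sports_event_date).toordinal()
        row = conn.execute(
            "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
            [showid, indexer, airdate]).fetchone()
        return row  # None when the date does not map to a known episode

    print(episode_for_airdate(12345, 1, datetime.date(2014, 5, 4)))   # (3, 7)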
and airdate = ?", @@ -1134,12 +1128,12 @@ def dirty_setter(attr_name): class TVEpisode(object): - def __init__(self, show, season=None, episode=None, scene_season=None, scene_episode=None, file=""): + def __init__(self, show, season, episode, file=""): self._name = "" self._season = season self._episode = episode - self._scene_season = scene_season - self._scene_episode = scene_episode + self._scene_season = -1 + self._scene_episode = -1 self._description = "" self._subtitles = list() self._subtitles_searchcount = 0 @@ -1164,7 +1158,7 @@ class TVEpisode(object): self.lock = threading.Lock() - self.specifyEpisode(self.season, self.episode, self.scene_season, self.scene_episode) + self.specifyEpisode(self.season, self.episode) self.relatedEps = [] @@ -1282,7 +1276,7 @@ class TVEpisode(object): cur_tbn = False # check for nfo and tbn - if ek.ek(os.path.isfile, self.location): + if self.location and ek.ek(os.path.isfile, self.location): for cur_provider in sickbeard.metadata_provider_dict.values(): if cur_provider.episode_metadata: new_result = cur_provider._has_episode_metadata(self) @@ -1302,11 +1296,11 @@ class TVEpisode(object): # if either setting has changed return true, if not return false return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn - def specifyEpisode(self, season, episode, scene_season=None, scene_episode=None): + def specifyEpisode(self, season, episode): sqlResult = self.loadFromDB(season, episode) - if not sqlResult: + if not sqlResult and self.location: # only load from NFO if we didn't load from DB if ek.ek(os.path.isfile, self.location): try: @@ -1319,7 +1313,7 @@ class TVEpisode(object): # if we tried loading it from NFO and didn't find the NFO, try the Indexers if not self.hasnfo: try: - result = self.loadFromIndexer(season, episode, scene_season, scene_episode) + result = self.loadFromIndexer(season, episode) except exceptions.EpisodeDeletedException: result = False @@ -1328,25 +1322,6 @@ class TVEpisode(object): raise exceptions.EpisodeNotFoundException( "Couldn't find episode " + str(season) + "x" + str(episode)) - # convert from indexer numbering <-> scene numerbing and back again once we have correct season and episode numbers - if self.season and self.episode: - self.scene_season, self.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid, - self.show.indexer, - self.season, self.episode) - self.season, self.episode = sickbeard.scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, - self.scene_season, - self.scene_episode) - - # convert from scene numbering <-> indexer numbering and back again once we have correct season and episode numbers - elif self.scene_season and self.scene_episode: - self.season, self.episode = sickbeard.scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, - self.scene_season, - self.scene_episode) - self.scene_season, self.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid, - self.show.indexer, - self.season, - self.episode) - def loadFromDB(self, season, episode): logger.log( @@ -1370,8 +1345,6 @@ class TVEpisode(object): self.season = season self.episode = episode - self.scene_season = sqlResults[0]["scene_season"] - self.scene_episode = sqlResults[0]["scene_episode"] self.description = sqlResults[0]["description"] if not self.description: self.description = "" @@ -1403,18 +1376,13 @@ class TVEpisode(object): self.dirty = False return True - def loadFromIndexer(self, season=None, episode=None, scene_season=None, 
scene_episode=None, cache=True, tvapi=None, cachedSeason=None): + def loadFromIndexer(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None): if season is None: season = self.season if episode is None: episode = self.episode - if scene_season is None: - scene_season = self.scene_season - if scene_episode is None: - scene_episode = self.scene_episode - logger.log(str(self.show.indexerid) + u": Loading episode details from " + sickbeard.indexerApi( self.show.indexer).name + " for episode " + str(season) + "x" + str(episode), logger.DEBUG) @@ -1705,13 +1673,12 @@ class TVEpisode(object): # use a custom update/insert method to get the data into the DB return [ - "INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, scene_season, scene_episode) VALUES " - "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", + "INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode) VALUES " + "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, - self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.scene_season, - self.scene_episode]] + self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode]] def saveToDB(self, forceSave=False): """ @@ -1745,9 +1712,7 @@ class TVEpisode(object): "location": self.location, "file_size": self.file_size, "release_name": self.release_name, - "is_proper": self.is_proper, - "scene_season": self.scene_season, - "scene_episode": self.scene_episode} + "is_proper": self.is_proper} controlValueDict = {"showid": self.show.indexerid, "season": self.season, "episode": self.episode} diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 3d31f41c..59c5d58f 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -63,6 +63,7 @@ class CacheDBConnection(db.DBConnection): if str(e) != "table lastUpdate already exists": raise + class TVCache(): def __init__(self, provider): @@ -94,7 +95,7 @@ class TVCache(): def getRSSFeed(self, url, post_data=None): # create provider storaqe cache - storage = Shove('file://' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.providerID)) + storage = Shove('sqlite:///' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.provider.name) + '.db') fc = cache.Cache(storage) parsed = list(urlparse.urlparse(url)) @@ -209,7 +210,6 @@ class TVCache(): lastUpdate = property(_getLastUpdate) def shouldUpdate(self): - return True # if we've updated recently then skip the update if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime): logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str( @@ -220,10 +220,10 @@ class TVCache(): def _addCacheEntry(self, name, url, quality=None): - 
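The tvcache.py hunk above moves the feed cache's Shove backend from a file:// store to one sqlite database per provider, and shouldUpdate() again honours minTime instead of returning True unconditionally (the 100% CPU fix). A hedged sketch of how shove and feedcache are typically wired together, assuming those two third-party packages (and an SQLAlchemy-backed sqlite store) are available as in this codebase:

    import os

    from feedcache import cache
    from shove import Shove

    CACHE_DIR = '/tmp/sb-cache'          # illustrative path
    provider_name = 'ExampleProvider'    # illustrative provider

    # one sqlite file per provider instead of the old file:// directory store
    storage = Shove('sqlite:///' + os.path.join(CACHE_DIR, provider_name) + '.db')
    feed_cache = cache.Cache(storage)

    try:
        parsed_feed = feed_cache.fetch('http://example.com/rss')   # served from cache between runs
    finally:
        storage.close()   # flush the on-disk store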
cacheResult = sickbeard.name_cache.retrieveNameFromCache(name) if cacheResult: - logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "}", logger.DEBUG) + logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "]", + logger.DEBUG) return # if we don't have complete info then parse the filename to get it @@ -242,19 +242,16 @@ logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG) return None - showObj = helpers.get_show_by_name(parse_result.series_name) + showObj = sickbeard.name_cache.retrieveShowFromCache(parse_result.series_name) if not showObj: - logger.log(u"Could not find a show matching " + parse_result.series_name + " in the database, skipping ...", logger.DEBUG) + logger.log(u"Cache lookup failed for [" + parse_result.series_name + "], skipping ...", logger.DEBUG) return None - logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) - sickbeard.name_cache.addNameToCache(name, showObj.indexerid) - season = episodes = None - if parse_result.air_by_date: + season = episodes = None + if parse_result.air_by_date or parse_result.sports: myDB = db.DBConnection() - airdate = parse_result.air_date.toordinal() + airdate = (parse_result.air_date or parse_result.sports_event_date).toordinal() sql_results = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?", [showObj.indexerid, showObj.indexer, airdate]) @@ -280,6 +277,7 @@ name = unicode(name, 'utf-8') logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG) + sickbeard.name_cache.addNameToCache(name, showObj.indexerid) return [ "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)", [name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]] diff --git a/tests/xem_tests.py b/tests/xem_tests.py index a9f0f69e..e2045208 100644 --- a/tests/xem_tests.py +++ b/tests/xem_tests.py @@ -68,6 +68,7 @@ class XEMBasicTests(test.SickbeardTestDBCase): # parse the name to break it into show name, season, and episode np = NameParser(file) parse_result = np.parse(release).convert() + airdate = parse_result.sports_event_date.toordinal() print(parse_result)
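Taken together, _addCacheEntry() now follows this order: bail out if the release name is already in the name cache, parse the name, resolve the show from the cached indexer id, work out season/episodes (via the airdate lookup above for air-by-date and sports shows), and only then record both the provider cache row and the name-cache entry. A simplified, dict-backed sketch of that flow; the parser and show lookup are passed in as stand-ins for SickBeard's NameParser and name-cache helpers:

    NAME_CACHE = {}        # release name -> indexer id
    PROVIDER_CACHE = []    # rows that would land in the provider's cache table

    def add_cache_entry(name, url, parse, find_show, quality=None):
        if NAME_CACHE.get(name):
            return None                            # already cached under this release name

        result = parse(name)                       # -> dict with series_name/season/episodes
        if not result or not result.get('series_name'):
            return None                            # cannot cache what we cannot identify

        show = find_show(result['series_name'])    # cache-backed show lookup
        if not show:
            return None                            # unknown show, skip it

        row = {'name': name, 'url': url, 'quality': quality,
               'season': result.get('season'), 'episodes': result.get('episodes'),
               'indexerid': show['indexerid']}
        PROVIDER_CACHE.append(row)
        NAME_CACHE[name] = show['indexerid']       # remember the mapping for next time
        return row

    parse = lambda n: {'series_name': 'Example Show', 'season': 2, 'episodes': [3]}
    find_show = lambda series: {'indexerid': 12345}
    print(add_cache_entry('Example.Show.S02E03.720p', 'http://example.com/nzb/1', parse, find_show))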