From b13e72e0a35ee67f9f2a0f1ba8c5e9904632cc0e Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 27 Jul 2014 04:58:14 -0700
Subject: [PATCH] Moved cache folder cleanup code into a separate function in
 helpers.py. Added a call to the cache folder cleanup during SickRage main
 init. Changed an error message in the RSS feed class to a debug message for
 when a URL returns no data. Moved indexer API cache files into the
 'indexers' subfolder of the cache folder. Moved RSS feed cache files into
 the 'rss' subfolder of the cache folder.

---
 sickbeard/__init__.py             |  4 ++++
 sickbeard/helpers.py              | 39 ++++++++++++++++++++++++++++++-
 sickbeard/indexers/indexer_api.py |  2 +-
 sickbeard/providers/kat.py        |  4 ----
 sickbeard/rssfeeds.py             | 14 ++++++-----
 sickbeard/showUpdater.py          | 28 +---------------------
 6 files changed, 52 insertions(+), 39 deletions(-)

diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 14fa7d2b..8f710ab1 100755
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -525,6 +525,10 @@ def initialize(consoleLogging=True):
         logger.log(u"!!! Creating local cache dir failed, using system default", logger.ERROR)
         CACHE_DIR = None
 
+    # clean cache folders
+    if CACHE_DIR:
+        helpers.clearCache()
+
     GUI_NAME = check_setting_str(CFG, 'GUI', 'gui_name', 'slick')
 
     ACTUAL_LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', 'Logs')
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 0f870fc0..bc72628e 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -32,6 +32,7 @@ import urlparse
 import uuid
 import base64
 import zipfile
+import datetime
 
 import sickbeard
 import subliminal
@@ -1306,4 +1307,40 @@ def download_file(url, filename, session=None):
                        resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
         return False
 
-    return True
\ No newline at end of file
+    return True
+
+def clearCache(force=False):
+    update_datetime = datetime.datetime.now()
+
+    # clean out cache directory, remove everything > 12 hours old
+    if sickbeard.CACHE_DIR:
+        logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR)
+
+        # Does our cache_dir exists
+        if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
+            logger.log(u"Can't clean " + sickbeard.CACHE_DIR + " if it doesn't exist", logger.WARNING)
+        else:
+            max_age = datetime.timedelta(hours=12)
+
+            # Get all our cache files
+            for cache_root, cache_dirs, cache_files in os.walk(sickbeard.CACHE_DIR):
+                path = os.path.basename(cache_root)
+
+                # skip rss provider caches
+                if path == 'rss':
+                    continue
+
+                for file in cache_files:
+                    cache_file = ek.ek(os.path.join, cache_root, file)
+
+                    if ek.ek(os.path.isfile, cache_file):
+                        cache_file_modified = datetime.datetime.fromtimestamp(
+                            ek.ek(os.path.getmtime, cache_file))
+
+                        if force or (update_datetime - cache_file_modified > max_age):
+                            try:
+                                ek.ek(os.remove, cache_file)
+                            except OSError, e:
+                                logger.log(u"Unable to clean " + cache_root + ": " + repr(e) + " / " + str(e),
+                                           logger.WARNING)
+                                break
\ No newline at end of file
diff --git a/sickbeard/indexers/indexer_api.py b/sickbeard/indexers/indexer_api.py
index 5e82d2f4..50c82738 100644
--- a/sickbeard/indexers/indexer_api.py
+++ b/sickbeard/indexers/indexer_api.py
@@ -47,7 +47,7 @@ class indexerApi(object):
     def api_params(self):
         if self.indexerID:
             if sickbeard.CACHE_DIR:
-                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, self.name)
+                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, 'indexers', self.name)
 
             if sickbeard.PROXY_SETTING:
                 indexerConfig[self.indexerID]['api_params']['proxy'] = sickbeard.PROXY_SETTING
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index e9abc71c..e2e88924 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -224,7 +224,6 @@ class KATProvider(generic.TorrentProvider):
 
         results = []
         items = {'Season': [], 'Episode': [], 'RSS': []}
-        soup = None
 
         for mode in search_params.keys():
             for search_string in search_params[mode]:
@@ -379,13 +378,10 @@ class KATCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
-
-
         if len(cl) > 0:
             myDB = self._getDB()
             myDB.mass_action(cl)
 
-
     def _parseItem(self, item):
 
         (title, url) = item
diff --git a/sickbeard/rssfeeds.py b/sickbeard/rssfeeds.py
index 3d3fbab6..9c59f9d9 100644
--- a/sickbeard/rssfeeds.py
+++ b/sickbeard/rssfeeds.py
@@ -16,7 +16,9 @@ from shove import Shove
 
 class RSSFeeds:
     def __init__(self, db_name):
-        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name + '.db')
+        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name + '.db')
+        if not os.path.exists(os.path.dirname(self.db_name)):
+            sickbeard.helpers.makeDir(os.path.dirname(self.db_name))
 
     def clearCache(self, age=None):
         try:
@@ -24,7 +26,7 @@ class RSSFeeds:
             fc = cache.Cache(fs)
             fc.purge(age)
         except Exception as e:
-            logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
+            logger.log(u"RSS error clearing cache: " + ex(e), logger.DEBUG)
 
     def getFeed(self, url, post_data=None, request_headers=None):
         parsed = list(urlparse.urlparse(url))
@@ -39,7 +41,7 @@ class RSSFeeds:
             feed = fc.fetch(url, False, False, request_headers)
 
             if not feed or not feed.entries:
-                logger.log(u"RSS cache error loading url: " + url, logger.ERROR)
+                logger.log(u"RSS error loading url: " + url, logger.DEBUG)
                 return
             elif 'error' in feed.feed:
                 err_code = feed.feed['error']['code']
@@ -48,7 +50,7 @@ class RSSFeeds:
                 logger.log(
                     u"RSS ERROR:[%s] CODE:[%s]" % (err_desc, err_code), logger.DEBUG)
                 return
-
-            return feed
+            else:
+                return feed
         except Exception as e:
-            logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
\ No newline at end of file
+            logger.log(u"RSS error: " + ex(e), logger.DEBUG)
\ No newline at end of file
diff --git a/sickbeard/showUpdater.py b/sickbeard/showUpdater.py
index 2789c617..d4b08f8b 100644
--- a/sickbeard/showUpdater.py
+++ b/sickbeard/showUpdater.py
@@ -47,33 +47,7 @@ class ShowUpdater():
         logger.log(u"Doing full update on all shows")
 
         # clean out cache directory, remove everything > 12 hours old
-        if sickbeard.CACHE_DIR:
-            for indexer in sickbeard.indexerApi().indexers:
-                cache_dir = sickbeard.indexerApi(indexer).cache
-                logger.log(u"Trying to clean cache folder " + cache_dir)
-
-                # Does our cache_dir exists
-                if not ek.ek(os.path.isdir, cache_dir):
-                    logger.log(u"Can't clean " + cache_dir + " if it doesn't exist", logger.WARNING)
-                else:
-                    max_age = datetime.timedelta(hours=12)
-                    # Get all our cache files
-                    cache_files = ek.ek(os.listdir, cache_dir)
-
-                    for cache_file in cache_files:
-                        cache_file_path = ek.ek(os.path.join, cache_dir, cache_file)
-
-                        if ek.ek(os.path.isfile, cache_file_path):
-                            cache_file_modified = datetime.datetime.fromtimestamp(
-                                ek.ek(os.path.getmtime, cache_file_path))
-
-                            if update_datetime - cache_file_modified > max_age:
-                                try:
-                                    ek.ek(os.remove, cache_file_path)
-                                except OSError, e:
-                                    logger.log(u"Unable to clean " + cache_dir + ": " + repr(e) + " / " + str(e),
-                                               logger.WARNING)
-                                    break
+        sickbeard.helpers.clearCache()
 
         # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update
         stale_should_update = []
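
--
For reference, a minimal usage sketch of the relocated helper; this is not
part of the diff above. It assumes SickRage has been initialized so that
sickbeard.CACHE_DIR points at a valid cache folder; the 12-hour cutoff and
the 'rss' subfolder exclusion come from clearCache() itself:

    from sickbeard import helpers

    # Default run, as wired into initialize() and ShowUpdater.run(): only
    # cache files older than 12 hours are removed, and files under an
    # 'rss' folder are skipped (RSSFeeds.clearCache() purges those).
    helpers.clearCache()

    # Forced run: remove the matching cache files regardless of their age.
    helpers.clearCache(force=True)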