from __future__ import with_statement

import os
import urllib
import urlparse
import re

import sickbeard

from sickbeard import logger
from sickbeard import encodingKludge as ek
from contextlib import closing
from sickbeard.exceptions import ex
from lib.feedcache import cache
from shove import Shove


class RSSFeeds:
    def __init__(self, db_name):
        # Keep the feed cache database inside SickBeard's configured cache directory.
        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name + '.db')

    def clearCache(self, age=None):
        # Purge cached feed entries older than `age` seconds. If the cache
        # database cannot be opened, remove it and retry once with a fresh one.
        try:
            with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                fc = cache.Cache(fs)
                fc.purge(age)
        except Exception:
            os.remove(self.db_name)
            try:
                with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                    fc = cache.Cache(fs)
                    fc.purge(age)
            except Exception as e:
                logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)

    def getFeed(self, url, post_data=None, request_headers=None):
        # Collapse runs of slashes in the URL path and rebuild the URL so the
        # normalization actually takes effect.
        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
        url = urlparse.urlunparse(parsed)

        if post_data:
            # Append the encoded parameters as a query string, adding a separator
            # only if the URL does not already contain one.
            url += ('&' if '?' in url else '?') + urllib.urlencode(post_data)

        # Fetch the feed through the feedcache layer. If the cache database is
        # unusable, remove it and retry once with a fresh one.
        try:
            with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                fc = cache.Cache(fs)
                feed = fc.fetch(url, False, False, request_headers)
        except Exception:
            os.remove(self.db_name)
            try:
                with closing(Shove('sqlite:///' + self.db_name, compress=True)) as fs:
                    fc = cache.Cache(fs)
                    feed = fc.fetch(url, False, False, request_headers)
            except Exception as e:
                logger.log(u"RSS cache error: " + ex(e), logger.DEBUG)
                feed = None

        if not feed:
            logger.log(u"RSS Error loading URL: " + url, logger.ERROR)
            return
        elif 'error' in feed.feed:
            logger.log(u"RSS ERROR:[%s] CODE:[%s]" % (feed.feed['error']['description'], feed.feed['error']['code']),
                       logger.DEBUG)
            return
        elif not feed.entries:
            logger.log(u"No RSS items found using URL: " + url, logger.WARNING)
            return

        return feed
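
# Usage sketch (illustrative only, not part of the module): roughly how a caller
# might exercise RSSFeeds. The cache name and feed URL below are made-up examples.
#
#     feeds = RSSFeeds('rss_cache')
#     feed = feeds.getFeed('http://example.com/rss')
#     if feed:
#         for entry in feed.entries:
#             print entry.title
#     feeds.clearCache(age=60 * 60 * 24)  # drop cached entries older than a day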