1
0
mirror of https://github.com/moparisthebest/SickRage synced 2024-11-10 19:35:08 -05:00

Merge pull request #720 from echel0n/dev

Dev
This commit is contained in:
echel0n 2014-08-14 21:55:15 -07:00
commit 6f426804f3
3 changed files with 36 additions and 12 deletions

View File

@@ -859,17 +859,6 @@ class PostProcessor(object):
# for curEp in [ep_obj] + ep_obj.relatedEps:
# curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# delete the existing file (and company)
for cur_ep in [ep_obj] + ep_obj.relatedEps:
try:
self._delete(cur_ep.location, associated_files=True)
# clean up any left over folders
if cur_ep.location:
helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
keep_dir=ep_obj.show._location)
except (OSError, IOError):
raise exceptions.PostProcessingFailed("Unable to delete the existing files")
# if the show directory doesn't exist then make it if allowed
if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
@@ -977,6 +966,17 @@ class PostProcessor(object):
except (OSError, IOError):
raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
# delete the existing file (and company)
for cur_ep in [ep_obj] + ep_obj.relatedEps:
try:
self._delete(cur_ep.location, associated_files=True)
# clean up any left over folders
if cur_ep.location:
helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
keep_dir=ep_obj.show._location)
except (OSError, IOError):
raise exceptions.PostProcessingFailed("Unable to delete the existing files")
# download subtitles
if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
for cur_ep in [ep_obj] + ep_obj.relatedEps:

View File

@@ -154,6 +154,26 @@ class OmgwtfnzbsCache(tvcache.TVCache):
tvcache.TVCache.__init__(self, provider)
self.minTime = 20
def _get_title_and_url(self, item):
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = item.title if item.title else None
if title:
title = u'' + title
title = title.replace(' ', '.')
url = item.link if item.link else None
if url:
url = url.replace('&amp;', '&')
return (title, url)
def _getDailyData(self):
params = {'user': provider.username,
'api': provider.api_key,

View File

@@ -96,6 +96,10 @@ class TVCache():
myDB = self._getDB()
myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])
def _get_title_and_url(self, item):
    """
    Retrieves the title and URL from a daily-search feed item.

    Default behavior delegates to the provider's own parsing logic.

    item: A feed item as produced by this cache's data source

    Returns: whatever the provider's _get_title_and_url returns
             (a (title, url) tuple)
    """
    # override this in the provider if daily search has a different data layout to backlog searches
    return self.provider._get_title_and_url(item)
def _getRSSData(self):
data = None
@@ -128,7 +132,7 @@ class TVCache():
# parse data
cl = []
for item in data:
title, url = self.provider._get_title_and_url(item)
title, url = self._get_title_and_url(item)
ci = self._parseItem(title, url)
if ci is not None:
cl.append(ci)