mirror of https://github.com/moparisthebest/SickRage
More memory reductions and cleanups
parent adb4715b3e
commit 5e507b3849
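
Nearly every hunk below applies the same two-step pattern: queue the per-row SQL in a local list, flush it through mass_action() in a single transaction, then del the list. The del only unbinds the name, but since that list holds the only reference, CPython's reference counting reclaims the whole batch immediately instead of keeping it alive until the enclosing method returns. A minimal sketch of the pattern, assuming SickRage's db.DBConnection and mass_action() behave as used in the hunks; the episodes loop and get_sql() call are illustrative stand-ins for whatever builds the batch:

    from sickbeard import db

    sql_l = []
    for ep_obj in episodes:             # stand-in: any loop that produces row updates
        sql_l.append(ep_obj.get_sql())  # queue one [query, params] pair

    if sql_l:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)         # run the whole batch in one transaction
        del sql_l                       # drop the only reference so the batch is
                                        # freed now, not when the method returns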
@@ -208,6 +208,5 @@ class BlackWhiteKeyword(object):
         self.range = range # "global" or a parser group
         self.value = values # a list of values may contain only one item (still a list)
-

 class BlackWhitelistNoShowIDException(Exception):
     "No show_id was given"
@@ -101,6 +101,7 @@ class DailySearcher():
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

         if len(todaysEps):
             for show in todaysEps:
@@ -114,4 +115,6 @@ class DailySearcher():
         else:
             logger.log(u"Could not find any needed episodes to search for ...")

+        del todaysEps
+
         self.amActive = False
@@ -165,6 +165,7 @@ def update_network_dict():
         pass

     myDB = db.DBConnection('cache.db')
+
     # load current network timezones
     old_d = dict(myDB.select("SELECT * FROM network_timezones"))

@@ -181,15 +182,17 @@ def update_network_dict():
             ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
         if h_k:
             del old_d[cur_d]

     # remove deleted records
     if len(old_d) > 0:
         L = list(va for va in old_d)
         ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])

     # change all network timezone infos at once (much faster)
     if ql:
         myDB.mass_action(ql)
         load_network_dict()

+    del ql
+
 # load network timezones from db into dict
 def load_network_dict():
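
The DELETE in the hunk above shows the placeholder-list idiom: build one ? per stale key, so a single parameterized statement removes every leftover row instead of looping over them. A standalone illustration, with made-up network names:

    stale = ['NBC HD', 'TechTV', 'UPN']            # hypothetical leftover keys from old_d
    placeholders = ','.join(['?'] * len(stale))    # -> '?,?,?'
    query = "DELETE FROM network_timezones WHERE network_name IN (" + placeholders + ")"
    # queued as [query, stale] and executed with the rest of the batch by mass_action()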
@@ -965,6 +965,7 @@ class PostProcessor(object):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

         # find the destination folder
         try:
@@ -1042,6 +1043,7 @@ class PostProcessor(object):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

         # log it to history
         history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)
@@ -346,6 +346,7 @@ class BTNCache(tvcache.TVCache):
             if cl:
                 myDB = self._getDB()
                 myDB.mass_action(cl)
+                del cl

         else:
             raise AuthException(
@@ -257,6 +257,7 @@ class HDBitsCache(tvcache.TVCache):
             if ql:
                 myDB = self._getDB()
                 myDB.mass_action(ql)
+                del ql

         else:
             raise exceptions.AuthException(
@@ -379,6 +379,7 @@ class HDTorrentsCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -320,6 +320,7 @@ class IPTorrentsCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -457,6 +457,7 @@ class KATCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -344,6 +344,7 @@ class NewznabCache(tvcache.TVCache):
             if ql:
                 myDB = self._getDB()
                 myDB.mass_action(ql)
+                del ql

         else:
             raise AuthException(
@@ -369,6 +369,7 @@ class NextGenCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -342,6 +342,7 @@ class PublicHDCache(tvcache.TVCache):
         if ql:
             myDB = self._getDB()
             myDB.mass_action(ql)
+            del ql

     def _parseItem(self, item):

@@ -364,6 +364,7 @@ class SCCCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -304,6 +304,7 @@ class SpeedCDCache(tvcache.TVCache):
         if ql:
             myDB = self._getDB()
             myDB.mass_action(ql)
+            del ql

     def _parseItem(self, item):

@@ -437,6 +437,7 @@ class ThePirateBayCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -328,6 +328,7 @@ class TorrentDayCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -323,6 +323,7 @@ class TorrentLeechCache(tvcache.TVCache):
         if cl:
             myDB = self._getDB()
             myDB.mass_action(cl)
+            del cl

     def _parseItem(self, item):

@@ -48,30 +48,33 @@ class WombleCache(tvcache.TVCache):
         self._clearCache()

         data = None

         if not self.shouldUpdate():
-            for url in [self.provider.url + 'rss/?sec=tv-sd&fr=false', self.provider.url + 'rss/?sec=tv-hd&fr=false']:
-                logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
-                data = self.getRSSFeed(url)
             return

-            # As long as we got something from the provider we count it as an update
-            if not data:
-                return []
+        cl = []
+        for url in [self.provider.url + 'rss/?sec=tv-sd&fr=false', self.provider.url + 'rss/?sec=tv-hd&fr=false']:
+            logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
+            data = self.getRSSFeed(url)

-            # By now we know we've got data and no auth errors, all we need to do is put it in the database
-            cl = []
-            for item in data.entries:
+            # As long as we got something from the provider we count it as an update
+            if not data:
+                return []

-                ci = self._parseItem(item)
-                if ci is not None:
-                    cl.append(ci)
+            # By now we know we've got data and no auth errors, all we need to do is put it in the database
+            for item in data.entries:
+                ci = self._parseItem(item)
+                if ci is not None:
+                    cl.append(ci)

-            if cl:
-                myDB = self._getDB()
-                myDB.mass_action(cl)
+        if cl:
+            myDB = self._getDB()
+            myDB.mass_action(cl)
+            del cl

-            # set last updated
-            if data:
-                self.setLastUpdate()
+        # set last updated
+        if data:
+            self.setLastUpdate()

     def _checkAuth(self, data):
         return data != 'Invalid Link'
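
Read as a whole, the Womble hunk is the one real restructure in this commit. In the removed lines (as recovered above), the fetch loop sat under `if not self.shouldUpdate():`, so the feeds were fetched precisely when no update was needed, and everything after the `return` was unreachable dead code. The rewritten version returns early without touching the network, accumulates cl across both feed URLs, flushes it with one mass_action(), and dels it.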
@@ -209,6 +209,10 @@ def retrieve_exceptions():
     else:
         logger.log(u"No scene exceptions update needed")

+    # cleanup
+    del existing_exceptions
+    del exception_dict
+
 def update_scene_exceptions(indexer_id, scene_exceptions):
     """
     Given a indexer_id, and a list of all show scene exceptions, update the db.
@@ -227,7 +231,6 @@ def update_scene_exceptions(indexer_id, scene_exceptions):

 def _retrieve_anidb_mainnames():

-
     anidb_mainNames = {}

     if shouldRefresh('anidb'):
@@ -528,10 +528,7 @@ def xem_refresh(indexer_id, indexer, force=False):
     if ql:
         myDB = db.DBConnection()
         myDB.mass_action(ql)
-
-    # fix xem scene numbering issues
-    # fix_xem_numbering(indexer_id, indexer)
-
+        del ql

 def fix_xem_numbering(indexer_id, indexer):
     """
@@ -694,4 +691,5 @@ def fix_xem_numbering(indexer_id, indexer):

     if ql:
         myDB = db.DBConnection()
-        myDB.mass_action(ql)
+        myDB.mass_action(ql)
+        del ql
@@ -175,6 +175,7 @@ def snatchEpisode(result, endStatus=SNATCHED):
     if sql_l:
         myDB = db.DBConnection()
         myDB.mass_action(sql_l)
+        del sql_l

     return True

@@ -456,6 +456,7 @@ class TVShow(object):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

     def loadEpisodesFromDB(self):

@@ -577,6 +578,7 @@ class TVShow(object):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

         # Done updating save last update date
         self.last_update_indexer = datetime.date.today().toordinal()
@@ -747,6 +749,7 @@ class TVShow(object):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

         # creating metafiles on the root should be good enough
         if sickbeard.USE_FAILED_DOWNLOADS and rootEp is not None:
@@ -1002,6 +1005,7 @@ class TVShow(object):

         myDB = db.DBConnection()
         myDB.mass_action(sql_l)
+        del sql_l

         # remove self from show list
         sickbeard.showList = [x for x in sickbeard.showList if int(x.indexerid) != self.indexerid]
@@ -1076,6 +1080,7 @@ class TVShow(object):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

     def airdateModifyStamp(self, ep_obj):
         """
@@ -2415,3 +2420,4 @@ class TVEpisode(object):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l
@@ -129,7 +129,7 @@ class TVCache():
             if cl:
                 myDB = self._getDB()
                 myDB.mass_action(cl)
-
+                del cl
         else:
             raise AuthException(
                 u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
@@ -1007,6 +1007,7 @@ class CMD_EpisodeSetStatus(ApiCall):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

         extra_msg = ""
         if start_backlog:
@@ -3998,6 +3998,7 @@ class Home(MainHandler):
         if sql_l:
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+            del sql_l

         if int(status) == WANTED:
             msg = "Backlog was automatically started for the following seasons of <b>" + showObj.name + "</b>:<br />"