Mirror of https://github.com/moparisthebest/SickRage, synced 2024-12-12 11:02:21 -05:00
Added a check to confirm that a daily search has run against the previous cache results before we clear the cache, so we don't throw away results that could still hold potential items to be snatched.
This commit is contained in:
parent 628fe23f8d
commit 25e2430820
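The heart of the change is a timestamp comparison: cached results are only cleared once the daily search has actually run against them. A rough standalone sketch of that gate follows (illustrative names only, not the SickRage API):

    import datetime

    class CacheGate:
        # Illustrative stand-in for the two timestamps TVCache tracks per provider.
        def __init__(self):
            self.last_update = datetime.datetime.fromtimestamp(0)  # when the cache was last filled
            self.last_search = datetime.datetime.fromtimestamp(0)  # when the daily search last read it

        def should_clear_cache(self):
            # If the cache was refreshed after the most recent daily search,
            # those results have not been searched yet -- keep them.
            return not (self.last_update > self.last_search)

    gate = CacheGate()
    gate.last_update = datetime.datetime(2014, 5, 1, 12, 0)  # cache refreshed at noon
    gate.last_search = datetime.datetime(2014, 5, 1, 11, 0)  # daily search ran an hour earlier
    assert gate.should_clear_cache() is False                # keep results until they are searched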
@@ -27,6 +27,7 @@ class InitialSchema(db.SchemaUpgrade):
         queries = [
             ("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",),
+            ("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);",),
             ("CREATE TABLE db_version (db_version INTEGER);",),
             ("INSERT INTO db_version (db_version) VALUES (?)", 1),
         ]
@@ -69,6 +70,7 @@ class AddXemNumbering(AddNetworkTimezones):
     def execute(self):
         self.connection.action(
             "CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER)")

 class AddXemRefresh(AddXemNumbering):
     def test(self):
         return self.hasTable("xem_refresh")
@@ -120,3 +122,9 @@ class RemoveKeysFromXemNumbering(ConvertIndexerToInteger):
         self.connection.action("ALTER TABLE xem_numbering DROP UNIQUE (indexer, indexer_id, season, episode)")
         self.connection.action("ALTER TABLE xem_numbering DROP PRIMARY KEY")

+class AddLastSearch(RemoveKeysFromXemNumbering):
+    def test(self):
+        return self.hasTable("lastSearch")
+
+    def execute(self):
+        self.connection.action("CREATE TABLE lastSearch (provider TEXT, time NUMERIC)")
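The schema change above follows the same test()/execute() migration pattern as the existing upgrades: test() reports whether the lastSearch table already exists, and execute() creates it when it does not. A rough equivalent using sqlite3 directly (has_table and add_last_search are hypothetical helpers, not the project's db module):

    import sqlite3

    def has_table(conn, name):
        # Look the table up in sqlite's catalog, mirroring hasTable() in the upgrade classes.
        row = conn.execute(
            "SELECT COUNT(*) FROM sqlite_master WHERE type = 'table' AND name = ?", (name,)
        ).fetchone()
        return row[0] > 0

    def add_last_search(conn):
        # Equivalent of AddLastSearch.execute(): one bookkeeping row per provider.
        if not has_table(conn, "lastSearch"):
            conn.execute("CREATE TABLE lastSearch (provider TEXT, time NUMERIC)")
            conn.commit()

    conn = sqlite3.connect(":memory:")
    add_last_search(conn)
    assert has_table(conn, "lastSearch")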
@@ -80,6 +80,8 @@ class TVCache():
         return CacheDBConnection(self.providerID)

     def _clearCache(self):
+        if not self.shouldClearCache():
+            return

         myDB = self._getDB()

@@ -201,6 +203,19 @@ class TVCache():

         return datetime.datetime.fromtimestamp(lastTime)

+    def _getLastSearch(self):
+        myDB = self._getDB()
+        sqlResults = myDB.select("SELECT time FROM lastSearch WHERE provider = ?", [self.providerID])
+
+        if sqlResults:
+            lastTime = int(sqlResults[0]["time"])
+            if lastTime > int(time.mktime(datetime.datetime.today().timetuple())):
+                lastTime = 0
+        else:
+            lastTime = 0
+
+        return datetime.datetime.fromtimestamp(lastTime)
+

     def setLastUpdate(self, toDate=None):
         if not toDate:
@@ -211,9 +226,18 @@ class TVCache():
                     {'time': int(time.mktime(toDate.timetuple()))},
                     {'provider': self.providerID})

+    def setLastSearch(self, toDate=None):
+        if not toDate:
+            toDate = datetime.datetime.today()
+
+        myDB = self._getDB()
+        myDB.upsert("lastSearch",
+                    {'time': int(time.mktime(toDate.timetuple()))},
+                    {'provider': self.providerID})
+

     lastUpdate = property(_getLastUpdate)
+    lastSearch = property(_getLastSearch)

     def shouldUpdate(self):
         # if we've updated recently then skip the update
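The new lastSearch stamp is stored exactly like the existing lastUpdate: written as epoch seconds (time.mktime of the datetime) and read back with datetime.fromtimestamp, with _getLastSearch treating any future-dated value as "never searched". A small round-trip sketch of that conversion, outside the SickRage DB layer (to_epoch and from_epoch are illustrative helpers):

    import datetime
    import time

    def to_epoch(when):
        # Same conversion setLastSearch()/setLastUpdate() use when writing the row.
        return int(time.mktime(when.timetuple()))

    def from_epoch(stored):
        # Same guard as _getLastSearch(): a stamp from the future falls back to 0.
        now = int(time.mktime(datetime.datetime.today().timetuple()))
        if stored > now:
            stored = 0
        return datetime.datetime.fromtimestamp(stored)

    stamp = to_epoch(datetime.datetime(2014, 5, 1, 12, 0))
    print(from_epoch(stamp))           # 2014-05-01 12:00:00
    print(from_epoch(stamp + 10**9))   # clamps to the Unix epoch, i.e. "never searched"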
@@ -224,6 +248,14 @@ class TVCache():

         return True

+    def shouldClearCache(self):
+        # if daily search hasn't used our previous results yet then don't clear the cache
+        if self.lastUpdate > self.lastSearch:
+            logger.log(
+                u"Daily search has not yet searched our last cache results, skipping clearing cache ...", logger.DEBUG)
+            return False
+
+        return True
+
     def _addCacheEntry(self, name, url, quality=None):
         indexerid = None
@@ -392,5 +424,8 @@ class TVCache():
         else:
             neededEps[epObj].append(result)

+        # datetime stamp this search so cache gets cleared
+        self.setLastSearch()
+
         return neededEps