Reverted some changes.

echel0n 2014-06-30 08:57:32 -07:00
parent 0e665cbbb2
commit 75f69396d4
26 changed files with 68 additions and 76 deletions


@@ -19,6 +19,7 @@
# Check needed software dependencies to nudge users to fix their setup
from __future__ import with_statement
+import functools
import sys
import shutil
@@ -377,31 +378,35 @@ def main():
sickbeard.launchBrowser(startPort)
sys.exit()
-def startup():
# Build from the DB to start with
loadShowsFromDB()
# Fire up all our threads
sickbeard.start()
# Launch browser if we're supposed to
-if sickbeard.LAUNCH_BROWSER and not noLaunch and not sickbeard.DAEMON and not sickbeard.restarted:
+if sickbeard.LAUNCH_BROWSER and not noLaunch:
sickbeard.launchBrowser(startPort)
# Start an update if we're supposed to
if forceUpdate or sickbeard.UPDATE_SHOWS_ON_START:
sickbeard.showUpdateScheduler.action.run(force=True)  # @UndefinedVariable
# If we restarted then unset the restarted flag
if sickbeard.restarted:
sickbeard.restarted = False
-# create ioloop
+# IOLoop
io_loop = IOLoop.current()
-io_loop.add_timeout(datetime.timedelta(seconds=5), startup)
+# Open browser window
+if sickbeard.LAUNCH_BROWSER and not (noLaunch or sickbeard.DAEMON or sickbeard.restarted):
+io_loop.add_timeout(datetime.timedelta(seconds=5), functools.partial(sickbeard.launchBrowser, startPort))
+# Start web server
io_loop.start()
+# Save and restart/shutdown
sickbeard.saveAndShutdown()
if __name__ == "__main__":
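
For context on the reverted startup path above: instead of deferring a startup() function, the code now schedules the browser launch itself as a one-shot callback on the Tornado IOLoop, binding its argument with functools.partial. Below is a minimal standalone sketch of that pattern, assuming Tornado's IOLoop API; the launch_browser function and start_port value are hypothetical stand-ins, not SickBeard's own names.

    # Sketch: run a callback once, a few seconds after the IOLoop starts.
    import datetime
    import functools

    from tornado.ioloop import IOLoop


    def launch_browser(port):
        # Placeholder for opening http://localhost:<port>/ in a browser.
        print("would open http://localhost:%d/ here" % port)


    start_port = 8081
    io_loop = IOLoop.current()

    # Schedule launch_browser(start_port) five seconds after the loop starts.
    io_loop.add_timeout(datetime.timedelta(seconds=5),
                        functools.partial(launch_browser, start_port))

    # Stop the loop shortly afterwards so this example terminates on its own.
    io_loop.add_timeout(datetime.timedelta(seconds=6), io_loop.stop)

    io_loop.start()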


@@ -85,10 +85,10 @@ class DailySearcher():
with ep.lock:
if ep.show.paused:
ep.status = common.SKIPPED
-else:
if ep.status == common.UNAIRED:
logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
ep.status = common.WANTED
if ep.status == common.WANTED:
if show not in todaysEps:
@@ -101,7 +101,7 @@ class DailySearcher():
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
if len(todaysEps):
for show in todaysEps:
@@ -115,6 +115,4 @@ class DailySearcher():
else:
logger.log(u"Could not find any needed episodes to search for ...")
-del todaysEps
self.amActive = False
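
The "if sql_l: ... mass_action(sql_l)" lines above (and in most of the hunks that follow) collect per-episode UPDATE statements into a list and run the whole batch at once; this commit only drops the trailing del cleanup after the batch. A rough standalone sketch of that batching idea follows, using the stdlib sqlite3 module rather than SickBeard's db.DBConnection, with made-up table and column names.

    # Sketch: collect queries, then commit them in a single transaction.
    import sqlite3

    sql_l = [
        ("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", (3, 101)),
        ("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", (3, 102)),
    ]

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, status INTEGER)")
    conn.executemany("INSERT INTO tv_episodes VALUES (?, ?)", [(101, 1), (102, 1)])

    if sql_l:
        # One commit for the whole batch instead of one per statement.
        with conn:
            for query, args in sql_l:
                conn.execute(query, args)

    print(conn.execute("SELECT episode_id, status FROM tv_episodes").fetchall())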


@@ -111,7 +111,7 @@ class DBConnection(object):
if self.hasTable('db_version'):
result = self.select("SELECT db_version FROM db_version")
except:
-pass
+return 0
if result:
return int(result[0]["db_version"])
@@ -153,7 +153,7 @@ class DBConnection(object):
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
logger.log(u"DB error: " + ex(e), logger.WARNING)
attempt += 1
-time.sleep(0.02)
+time.sleep(1)
else:
logger.log(u"DB error: " + ex(e), logger.ERROR)
raise
@@ -164,7 +164,7 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
raise
return sqlResult
def action(self, query, args=None, fetchall=False, fetchone=False):
@@ -191,7 +191,7 @@ class DBConnection(object):
if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
logger.log(u"DB error: " + ex(e), logger.WARNING)
attempt += 1
-time.sleep(0.02)
+time.sleep(1)
else:
logger.log(u"DB error: " + ex(e), logger.ERROR)
raise
@@ -199,7 +199,7 @@ class DBConnection(object):
logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
raise
return sqlResult
def select(self, query, args=None):
@@ -342,11 +342,7 @@ class SchemaUpgrade(object):
self.connection.action("UPDATE %s SET %s = ?" % (table, column), (default,))
def checkDBVersion(self):
-result = self.connection.select("SELECT db_version FROM db_version")
-if result:
-return int(result[0]["db_version"])
-else:
-return 0
+return self.connection.checkDBVersion()
def incDBVersion(self):
new_version = self.checkDBVersion() + 1
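
The two time.sleep changes above tune the retry loop DBConnection uses when SQLite reports a locked or unopenable database: wait longer (1 second instead of 0.02) before trying again. A standalone sketch of that retry pattern with the stdlib sqlite3 module is shown below; it illustrates the idea only and is not SickBeard's actual DBConnection code.

    # Sketch: retry a query a few times when SQLite says the database is locked.
    import sqlite3
    import time

    MAX_ATTEMPTS = 5


    def execute_with_retry(conn, query, args=()):
        attempt = 0
        while attempt < MAX_ATTEMPTS:
            try:
                return conn.execute(query, args).fetchall()
            except sqlite3.OperationalError as e:
                if "unable to open database file" in str(e) or "database is locked" in str(e):
                    attempt += 1
                    time.sleep(1)  # the commit above raises this delay from 0.02s to 1s
                else:
                    raise
        raise sqlite3.OperationalError("giving up after %d attempts" % MAX_ATTEMPTS)


    conn = sqlite3.connect(":memory:")
    print(execute_with_retry(conn, "SELECT 1"))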


@@ -192,7 +192,7 @@ def update_network_dict():
if ql:
myDB.mass_action(ql)
load_network_dict()
-del ql
# load network timezones from db into dict
def load_network_dict():


@@ -965,7 +965,7 @@ class PostProcessor(object):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
# find the destination folder
try:
@@ -1043,7 +1043,7 @@ class PostProcessor(object):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
# log it to history
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)


@@ -346,7 +346,6 @@ class BTNCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
else:
raise AuthException(


@@ -257,7 +257,7 @@ class HDBitsCache(tvcache.TVCache):
if ql:
myDB = self._getDB()
myDB.mass_action(ql)
-del ql
else:
raise exceptions.AuthException(


@@ -379,7 +379,7 @@ class HDTorrentsCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -320,7 +320,7 @@ class IPTorrentsCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -457,7 +457,7 @@ class KATCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -344,7 +344,7 @@ class NewznabCache(tvcache.TVCache):
if ql:
myDB = self._getDB()
myDB.mass_action(ql)
-del ql
else:
raise AuthException(


@@ -369,7 +369,7 @@ class NextGenCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -342,7 +342,7 @@ class PublicHDCache(tvcache.TVCache):
if ql:
myDB = self._getDB()
myDB.mass_action(ql)
-del ql
def _parseItem(self, item):


@@ -364,7 +364,7 @@ class SCCCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -304,7 +304,7 @@ class SpeedCDCache(tvcache.TVCache):
if ql:
myDB = self._getDB()
myDB.mass_action(ql)
-del ql
def _parseItem(self, item):


@@ -437,7 +437,7 @@ class ThePirateBayCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -328,7 +328,7 @@ class TorrentDayCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -323,7 +323,7 @@ class TorrentLeechCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
def _parseItem(self, item):


@@ -70,7 +70,6 @@ class WombleCache(tvcache.TVCache):
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
# set last updated
if data:


@@ -528,7 +528,7 @@ def xem_refresh(indexer_id, indexer, force=False):
if ql:
myDB = db.DBConnection()
myDB.mass_action(ql)
-del ql
def fix_xem_numbering(indexer_id, indexer):
"""
@@ -692,4 +692,3 @@ def fix_xem_numbering(indexer_id, indexer):
if ql:
myDB = db.DBConnection()
myDB.mass_action(ql)
-del ql


@@ -175,7 +175,7 @@ def snatchEpisode(result, endStatus=SNATCHED):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
return True


@@ -456,7 +456,7 @@ class TVShow(object):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
def loadEpisodesFromDB(self):
@@ -578,7 +578,7 @@ class TVShow(object):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
# Done updating save last update date
self.last_update_indexer = datetime.date.today().toordinal()
@@ -749,7 +749,7 @@ class TVShow(object):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
# creating metafiles on the root should be good enough
if sickbeard.USE_FAILED_DOWNLOADS and rootEp is not None:
@@ -1005,7 +1005,7 @@ class TVShow(object):
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
# remove self from show list
sickbeard.showList = [x for x in sickbeard.showList if int(x.indexerid) != self.indexerid]
@@ -1080,7 +1080,7 @@ class TVShow(object):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
def airdateModifyStamp(self, ep_obj):
"""
@@ -2420,4 +2420,4 @@ class TVEpisode(object):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l


@@ -37,6 +37,7 @@ from sickbeard.rssfeeds import RSSFeeds
cache_lock = threading.Lock()
class CacheDBConnection(db.DBConnection):
def __init__(self, providerName):
db.DBConnection.__init__(self, "cache.db")
@@ -67,6 +68,7 @@ class CacheDBConnection(db.DBConnection):
if str(e) != "table lastUpdate already exists":
raise
class TVCache():
def __init__(self, provider):
@@ -75,11 +77,12 @@ class TVCache():
self.minTime = 10
def _getDB(self):
return CacheDBConnection(self.providerID)
def _clearCache(self):
if self.shouldClearCache():
+logger.log(u"Clearing " + self.provider.name + " cache")
curDate = datetime.date.today() - datetime.timedelta(weeks=1)
myDB = self._getDB()
@@ -103,14 +106,9 @@ class TVCache():
def updateCache(self):
-# delete anything older then 7 days
-logger.log(u"Clearing " + self.provider.name + " cache")
-self._clearCache()
-if not self.shouldUpdate():
-return
-if self._checkAuth(None):
+if self.shouldUpdate() and self._checkAuth(None):
+self._clearCache()
data = self._getRSSData()
# as long as the http request worked we count this as an update
@@ -129,7 +127,6 @@ class TVCache():
if cl:
myDB = self._getDB()
myDB.mass_action(cl)
-del cl
else:
raise AuthException(
u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
@@ -219,10 +216,10 @@ class TVCache():
def shouldUpdate(self):
# if we've updated recently then skip the update
-# if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
-# logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
-# datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
-# return False
+if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
+logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
+datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
+return False
return True
@@ -230,7 +227,7 @@ class TVCache():
# if daily search hasn't used our previous results yet then don't clear the cache
if self.lastUpdate > self.lastSearch:
logger.log(
-u"Daily search has not yet searched our last cache results, skipping clearig cache ...", logger.DEBUG)
+u"Daily search has not yet used our last cache results, not clearing cache ...", logger.DEBUG)
return False
return True
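
The shouldUpdate hunk above re-enables the freshness check that had been commented out: if the provider cache was updated less than minTime minutes ago, skip the update and reuse the cached results. Below is a small standalone sketch of that time-gating check; last_update is a stand-in for the timestamp TVCache normally reads from its cache database.

    # Sketch: skip a cache refresh if the last one was too recent.
    import datetime

    MIN_TIME = 10  # minutes
    last_update = datetime.datetime.today() - datetime.timedelta(minutes=3)


    def should_update():
        if datetime.datetime.today() - last_update < datetime.timedelta(minutes=MIN_TIME):
            print("Last update was too soon, using old cache")
            return False
        return True


    if should_update():
        print("would fetch fresh RSS data here")
    else:
        print("skipping update")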


@@ -1007,7 +1007,7 @@ class CMD_EpisodeSetStatus(ApiCall):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
extra_msg = ""
if start_backlog:


@@ -3998,7 +3998,7 @@ class Home(MainHandler):
if sql_l:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
-del sql_l
if int(status) == WANTED:
msg = "Backlog was automatically started for the following seasons of <b>" + showObj.name + "</b>:<br />"


@@ -1,6 +1,5 @@
import os
import traceback
-import time
import sickbeard
import webserve
import webapi