mirror of https://github.com/moparisthebest/SickRage
Modified DB code to close its connection right after it's finished with it; this helps performance and should resolve locked DB issues as well.
This commit is contained in:
parent 2c0f3a3dc3
commit d00d55fdfc
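The change wires Python's context-manager protocol into the DBConnection class (see the db hunks below): entering a with-block hands the caller the open wrapper, and leaving it closes the underlying SQLite connection instead of waiting for garbage collection. A minimal sketch of that plumbing follows; the sqlite3 wiring and the trimmed-down __init__ here are illustrative assumptions, not a copy of the real class.

import sqlite3

class DBConnection(object):
    # Minimal sketch of the context-manager support this commit adds to DBConnection;
    # the real class has many more methods (select, action, mass_action, upsert, ...).
    def __init__(self, filename="sickbeard.db"):
        self.filename = filename
        self.connection = sqlite3.connect(filename)

    def close(self):
        self.connection.close()

    def __enter__(self):
        # "with db.DBConnection() as myDB:" binds myDB to this open wrapper.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Runs when the with-block ends, normally or via an exception,
        # and closes the SQLite connection right away.
        self.close()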
SickBeard.py

@@ -76,7 +76,7 @@ def loadShowsFromDB():
Populates the showList with shows from the database
"""
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_shows")
for sqlShow in sqlResults:

@@ -187,16 +187,6 @@ def main():
# Need console logging for SickBeard.py and SickBeard-console.exe
consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)
- # Attempt to rename the process for easier debugging
- try:
- from setproctitle import setproctitle
- except ImportError:
- if consoleLogging:
- sys.stderr.write(u"setproctitle module is not available.\n")
- setproctitle = lambda t: None
- setproctitle(sickbeard.MY_NAME)
# Rename the main thread
threading.currentThread().name = "MAIN"

@@ -306,16 +296,18 @@ def main():
sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)
- CUR_DB_VERSION = db.DBConnection().checkDBVersion()
+ with db.DBConnection() as myDB:
+ CUR_DB_VERSION = myDB.checkDBVersion()
if CUR_DB_VERSION > 0:
if CUR_DB_VERSION < MIN_DB_VERSION:
raise SystemExit("Your database version (" + str(
- db.DBConnection().checkDBVersion()) + ") is too old to migrate from with this version of SickRage (" + str(
+ CUR_DB_VERSION) + ") is too old to migrate from with this version of SickRage (" + str(
MIN_DB_VERSION) + ").\n" + \
"Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
if CUR_DB_VERSION > MAX_DB_VERSION:
raise SystemExit("Your database version (" + str(
- db.DBConnection().checkDBVersion()) + ") has been incremented past what this version of SickRage supports (" + str(
+ CUR_DB_VERSION) + ") has been incremented past what this version of SickRage supports (" + str(
MAX_DB_VERSION) + ").\n" + \
"If you have used other forks of SB, your database may be unusable due to their modifications.")
@@ -433,7 +433,6 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'
__INITIALIZED__ = False

def initialize(consoleLogging=True):
with INIT_LOCK:

@@ -906,16 +905,20 @@ def initialize(consoleLogging=True):
logger.sb_log_instance.initLogging(consoleLogging=consoleLogging)
# initialize the main SB database
- db.upgradeDatabase(db.DBConnection(), mainDB.InitialSchema)
+ with db.DBConnection() as myDB:
+ db.upgradeDatabase(myDB, mainDB.InitialSchema)
# initialize the cache database
- db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)
+ with db.DBConnection('cache.db') as myDB:
+ db.upgradeDatabase(myDB, cache_db.InitialSchema)
# initialize the failed downloads database
- db.upgradeDatabase(db.DBConnection("failed.db"), failed_db.InitialSchema)
+ with db.DBConnection('failed.db') as myDB:
+ db.upgradeDatabase(myDB, failed_db.InitialSchema)
# fix up any db problems
- db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck)
+ with db.DBConnection() as myDB:
+ db.sanityCheckDatabase(myDB, mainDB.MainSanityCheck)
# migrate the config if it needs it
migrator = ConfigMigrator(CFG)

@@ -1803,7 +1806,7 @@ def getEpList(epIDs, showid=None):
query += " AND showid = ?"
params.append(showid)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select(query, params)
epList = []
@@ -33,8 +33,6 @@ class BlackAndWhiteList(object):
if not show_id:
raise BlackWhitelistNoShowIDException()
self.show_id = show_id
- self.myDB = db.DBConnection()
self.refresh()

def refresh(self):

@@ -98,8 +96,9 @@ class BlackAndWhiteList(object):
return "Blacklist: " + blackResult + ", Whitelist: " + whiteResult

def _add_keywords(self, table, range, values):
+ with db.DBConnection() as myDB:
for value in values:
- self.myDB.action("INSERT INTO " + table + " (show_id, range , keyword) VALUES (?,?,?)", [self.show_id, range, value])
+ myDB.action("INSERT INTO " + table + " (show_id, range , keyword) VALUES (?,?,?)", [self.show_id, range, value])
self.refresh()

def _del_all_black_keywords(self):

@@ -116,16 +115,19 @@ class BlackAndWhiteList(object):
def _del_all_keywords(self, table):
logger.log(u"Deleting all " + table + " keywords for " + str(self.show_id), logger.DEBUG)
- self.myDB.action("DELETE FROM " + table + " WHERE show_id = ?", [self.show_id])
+ with db.DBConnection() as myDB:
+ myDB.action("DELETE FROM " + table + " WHERE show_id = ?", [self.show_id])
self.refresh()

def _del_all_keywords_for(self, table, range):
logger.log(u"Deleting all " + range + " " + table + " keywords for " + str(self.show_id), logger.DEBUG)
- self.myDB.action("DELETE FROM " + table + " WHERE show_id = ? and range = ?", [self.show_id, range])
+ with db.DBConnection() as myDB:
+ myDB.action("DELETE FROM " + table + " WHERE show_id = ? and range = ?", [self.show_id, range])
self.refresh()

def _load_list(self, table):
- sqlResults = self.myDB.select("SELECT range,keyword FROM " + table + " WHERE show_id = ? ", [self.show_id])
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select("SELECT range,keyword FROM " + table + " WHERE show_id = ? ", [self.show_id])
if not sqlResults or not len(sqlResults):
return ([], {})
@@ -467,7 +467,7 @@ class ConfigMigrator():
sickbeard.NAMING_MULTI_EP = int(check_setting_int(self.config_obj, 'General', 'naming_multi_ep_type', 1))
# see if any of their shows used season folders
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
season_folder_shows = myDB.select("SELECT * FROM tv_shows WHERE flatten_folders = 0")
# if any shows had season folders on then prepend season folder to the pattern
@@ -43,7 +43,7 @@ class DailySearcher():
fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
curDate = datetime.date.today()
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?",
[common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])

@@ -80,7 +80,7 @@ class DailySearcher():
sql_l.append(ep.get_sql())
if sql_l:
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
if len(todaysEps):
@@ -46,7 +46,7 @@ def dbFilename(filename="sickbeard.db", suffix=None):
return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)

- class DBConnection:
+ class DBConnection(object):
def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):
self.filename = filename

@@ -70,45 +70,6 @@ class DBConnection:
else:
return 0
- def fetch(self, query, args=None):
- with db_lock:
- if query == None:
- return
- sqlResult = None
- attempt = 0
- while attempt < 5:
- try:
- if args == None:
- logger.log(self.filename + ": " + query, logger.DB)
- cursor = self.connection.cursor()
- cursor.execute(query)
- sqlResult = cursor.fetchone()[0]
- else:
- logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB)
- cursor = self.connection.cursor()
- cursor.execute(query, args)
- sqlResult = cursor.fetchone()[0]
- # get out of the connection attempt loop since we were successful
- break
- except sqlite3.OperationalError, e:
- if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
- logger.log(u"DB error: " + ex(e), logger.WARNING)
- attempt += 1
- time.sleep(0.02)
- else:
- logger.log(u"DB error: " + ex(e), logger.ERROR)
- raise
- except sqlite3.DatabaseError, e:
- logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
- raise
- return sqlResult
def mass_action(self, querylist, logTransaction=False):
with db_lock:

@@ -244,6 +205,15 @@ class DBConnection:
def close(self):
self.connection.close()
+ def __enter__(self):
+ return self
+ def __del__(self):
+ self.close()
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()

def sanityCheckDatabase(connection, sanity_check):
sanity_check(connection).check()
@@ -50,7 +50,7 @@ def logFailed(release):
release = prepareFailedName(release)
- myDB = db.DBConnection("failed.db")
+ with db.DBConnection('failed.db') as myDB:
sql_results = myDB.select("SELECT * FROM history WHERE release=?", [release])
if len(sql_results) == 0:

@@ -86,10 +86,9 @@ def logFailed(release):
def logSuccess(release):
- myDB = db.DBConnection("failed.db")
release = prepareFailedName(release)
+ with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE release=?", [release])

@@ -104,19 +103,20 @@ def hasFailed(release, size, provider="%"):
release = prepareFailedName(release)
- myDB = db.DBConnection("failed.db")
+ with db.DBConnection('failed.db') as myDB:
sql_results = myDB.select(
"SELECT * FROM failed WHERE release=? AND size=? AND provider LIKE ?",
[release, size, provider])
return (len(sql_results) > 0)

def revertEpisode(epObj):
"""Restore the episodes of a failed download to their original state"""
- myDB = db.DBConnection("failed.db")
+ with db.DBConnection('failed.db') as myDB:
+ sql_results = myDB.select("SELECT * FROM history WHERE showid=? AND season=?",
+ [epObj.show.indexerid, epObj.season])
- sql_results = myDB.select("SELECT * FROM history WHERE showid=? AND season=?", [epObj.show.indexerid, epObj.season])
history_eps = dict([(res["episode"], res) for res in sql_results])
try:

@@ -135,6 +135,7 @@ def revertEpisode(epObj):
logger.log(u"Unable to create episode, please set its status manually: " + ex(e),
logger.WARNING)

def markFailed(epObj):
log_str = u""

@@ -151,8 +152,6 @@ def markFailed(epObj):
def logSnatch(searchResult):
- myDB = db.DBConnection("failed.db")
logDate = datetime.datetime.today().strftime(dateFormat)
release = prepareFailedName(searchResult.name)

@@ -164,6 +163,7 @@ def logSnatch(searchResult):
show_obj = searchResult.episodes[0].show
+ with db.DBConnection('failed.db') as myDB:
for episode in searchResult.episodes:
myDB.action(
"INSERT INTO history (date, size, release, provider, showid, season, episode, old_status)"

@@ -173,19 +173,19 @@ def logSnatch(searchResult):
def deleteLoggedSnatch(release, size, provider):
- myDB = db.DBConnection("failed.db")
release = prepareFailedName(release)
+ with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE release=? AND size=? AND provider=?",
[release, size, provider])

def trimHistory():
- myDB = db.DBConnection("failed.db")
+ with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE date < " + str(
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))

def findRelease(epObj):
"""
Find releases in history by show ID and season.

@@ -195,11 +195,12 @@ def findRelease(epObj):
release = None
provider = None
- myDB = db.DBConnection("failed.db")
# Clear old snatches for this release if any exist
+ with db.DBConnection('failed.db') as myDB:
myDB.action("DELETE FROM history WHERE showid=" + str(epObj.show.indexerid) + " AND season=" + str(
- epObj.season) + " AND episode=" + str(epObj.episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
+ epObj.season) + " AND episode=" + str(
+ epObj.episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
epObj.show.indexerid) + " AND season=" + str(epObj.season) + " AND episode=" + str(epObj.episode) + ")")
# Search for release in snatch history
|
@ -288,10 +288,9 @@ def makeDir(path):
|
||||
def searchDBForShow(regShowName, log=False):
|
||||
showNames = [re.sub('[. -]', ' ', regShowName)]
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
yearRegex = "([^()]+?)\s*(\()?(\d{4})(?(2)\))$"
|
||||
|
||||
with db.DBConnection() as myDB:
|
||||
for showName in showNames:
|
||||
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ?",
|
||||
@ -681,7 +680,7 @@ def update_anime_support():
|
||||
sickbeard.ANIMESUPPORT = is_anime_in_show_list()
|
||||
|
||||
def get_absolute_number_from_season_and_episode(show, season, episode):
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
sql = "SELECT * FROM tv_episodes WHERE showid = ? and season = ? and episode = ?"
|
||||
sqlResults = myDB.select(sql, [show.indexerid, season, episode])
|
||||
|
||||
|
@@ -31,7 +31,7 @@ def _logHistoryItem(action, showid, season, episode, quality, resource, provider
if not isinstance(resource, unicode):
resource = unicode(resource, 'utf-8')
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
myDB.action(
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
[action, logDate, showid, season, episode, quality, resource, provider])
@@ -32,8 +32,8 @@ def addNameToCache(name, indexer_id=0):
# standardize the name we're using to account for small differences in providers
name = sanitizeSceneName(name)
- cacheDB = db.DBConnection('cache.db')
- cacheDB.action("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])
+ with db.DBConnection('cache.db') as myDB:
+ myDB.action("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])

def retrieveNameFromCache(name):

@@ -50,9 +50,9 @@ def retrieveNameFromCache(name):
# standardize the name we're using to account for small differences in providers
name = sanitizeSceneName(name)
- cacheDB = db.DBConnection('cache.db')
- if cacheDB.hasTable('scene_names'):
- cache_results = cacheDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
+ with db.DBConnection('cache.db') as myDB:
+ if myDB.hasTable('scene_names'):
+ cache_results = myDB.select("SELECT * FROM scene_names WHERE name = ?", [name])
if cache_results:
return int(cache_results[0]["indexer_id"])

@@ -66,10 +66,11 @@ def clearCache(show=None, season=-1, indexer_id=0):
"""
Deletes all "unknown" entries from the cache (names with indexer_id of 0).
"""
- cacheDB = db.DBConnection('cache.db')
+ with db.DBConnection('cache.db') as myDB:
if show:
showNames = sickbeard.show_name_helpers.allPossibleShowNames(show, season=season)
for showName in showNames:
- cacheDB.action("DELETE FROM scene_names WHERE name = ? and indexer_id = ?", [showName, indexer_id])
+ myDB.action("DELETE FROM scene_names WHERE name = ? and indexer_id = ?", [showName, indexer_id])
else:
- cacheDB.action("DELETE FROM scene_names WHERE indexer_id = ?", [indexer_id])
+ myDB.action("DELETE FROM scene_names WHERE indexer_id = ?", [indexer_id])
@@ -164,7 +164,7 @@ def update_network_dict():
except (IOError, OSError):
pass
- myDB = db.DBConnection("cache.db")
+ with db.DBConnection('cache.db') as myDB:
# load current network timezones
old_d = dict(myDB.select("SELECT * FROM network_timezones"))

@@ -195,7 +195,7 @@ def update_network_dict():
def load_network_dict():
d = {}
try:
- myDB = db.DBConnection("cache.db")
+ with db.DBConnection('cache.db') as myDB:
cur_network_list = myDB.select("SELECT * FROM network_timezones")
if cur_network_list is None or len(cur_network_list) < 1:
update_network_dict()
@@ -151,9 +151,9 @@ class EmailNotifier:
addrs.append(addr)
# Grab the recipients for the show
- mydb = db.DBConnection()
+ with db.DBConnection() as myDB:
for s in show:
- for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (s,)):
+ for subs in myDB.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (s,)):
if subs['notify_list']:
for addr in subs['notify_list'].split(','):
if (len(addr.strip()) > 0):
@@ -405,9 +405,8 @@ class PostProcessor(object):
if self.folder_name:
names.append(self.folder_name)
- myDB = db.DBConnection()
# search the database for a possible match and return immediately if we find one
+ with db.DBConnection() as myDB:
for curName in names:
search_name = re.sub("[\.\-\ ]", "_", curName)
sql_results = myDB.select("SELECT * FROM history WHERE resource LIKE ?", [search_name])

@@ -624,7 +623,7 @@ class PostProcessor(object):
self._log(u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = episodes[0].toordinal()
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[show.indexerid, show.indexer, airdate])

@@ -640,7 +639,7 @@ class PostProcessor(object):
# if there's no season then we can hopefully just use 1 automatically
elif season == None and show:
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
[show.indexerid, show.indexer])

@@ -965,7 +964,7 @@ class PostProcessor(object):
self._log(u"Couldn't find release in snatch history", logger.WARNING)
if sql_l:
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
# find the destination folder

@@ -1042,7 +1041,7 @@ class PostProcessor(object):
sql_l.append(ep_obj.get_sql())
if sql_l:
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
# log it to history
@@ -242,8 +242,9 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
return False
# make sure the dir isn't inside a show dir
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_shows")
for sqlShow in sqlResults:
if dirName.lower().startswith(
ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(

@@ -344,7 +345,7 @@ def already_postprocessed(dirName, videofile, force):
dirName = unicode(dirName, 'utf_8')
# Avoid processing the same dir again if we use a process method <> move
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
if sqlResult:
returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
@@ -182,7 +182,7 @@ class ProperFinder():
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = curProper.episode.toordinal()
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sql_result = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[curProper.indexerid, curProper.indexer, airdate])

@@ -196,11 +196,14 @@ class ProperFinder():
continue
# check if we actually want this proper (if it's the right quality)
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
"SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[curProper.indexerid, curProper.season, curProper.episode])
if not sqlResults:
continue
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)

@@ -222,7 +225,7 @@ class ProperFinder():
historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)
# make sure the episode has been downloaded before
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
historyResults = myDB.select(
"SELECT resource FROM history "
"WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "

@@ -273,7 +276,7 @@ class ProperFinder():
logger.log(u"Setting the last Proper search in the DB to " + str(when), logger.DEBUG)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:

@@ -284,7 +287,7 @@ class ProperFinder():
def _get_lastProperSearch(self):
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
try:
@@ -321,7 +321,7 @@ class GenericProvider:
logger.DEBUG)
continue
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
[show.indexerid,
@@ -314,13 +314,15 @@ class HDTorrentsProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -257,13 +257,15 @@ class IPTorrentsProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -393,13 +393,15 @@ class KATProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -305,13 +305,15 @@ class NextGenProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -277,13 +277,15 @@ class PublicHDProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -299,13 +299,15 @@ class SCCProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -239,13 +239,15 @@ class SpeedCDProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -371,13 +371,15 @@ class ThePirateBayProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -263,13 +263,15 @@ class TorrentDayProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []

@@ -258,13 +258,15 @@ class TorrentLeechProvider(generic.TorrentProvider):
results = []
- sqlResults = db.DBConnection().select(
+ with db.DBConnection() as myDB:
+ sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []
@@ -41,7 +41,7 @@ def get_scene_exceptions(indexer_id, season=-1):
global exceptionCache
if indexer_id not in exceptionCache or season not in exceptionCache[indexer_id]:
- myDB = db.DBConnection("cache.db")
+ with db.DBConnection('cache.db') as myDB:
exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?",
[indexer_id, season])
exceptionsList = list(set([cur_exception["show_name"] for cur_exception in exceptions]))

@@ -60,7 +60,7 @@ def get_scene_exceptions(indexer_id, season=-1):
return exceptionsList

def get_all_scene_exceptions(indexer_id):
- myDB = db.DBConnection("cache.db")
+ with db.DBConnection('cache.db') as myDB:
exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
exceptionsList = {}
[cur_exception["show_name"] for cur_exception in exceptions]

@@ -77,7 +77,7 @@ def get_scene_seasons(indexer_id):
"""
global exceptionSeasonCache
if indexer_id not in exceptionSeasonCache:
- myDB = db.DBConnection("cache.db")
+ with db.DBConnection('cache.db') as myDB:
sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?",
[indexer_id])
exceptionSeasonCache[indexer_id] = [int(x["season"]) for x in sqlResults]

@@ -95,9 +95,8 @@ def get_scene_exception_by_name_multiple(show_name):
is present.
"""
- myDB = db.DBConnection("cache.db")
# try the obvious case first
+ with db.DBConnection('cache.db') as myDB:
exception_result = myDB.select(
"SELECT indexer_id, season FROM scene_exceptions WHERE LOWER(show_name) = ? ORDER BY season ASC",
[show_name.lower()])

@@ -106,6 +105,7 @@ def get_scene_exception_by_name_multiple(show_name):
out = []
all_exception_results = myDB.select("SELECT show_name, indexer_id, season FROM scene_exceptions")
for cur_exception in all_exception_results:
cur_exception_name = cur_exception["show_name"]

@@ -180,11 +180,10 @@ def retrieve_exceptions():
else:
exception_dict[local_ex] = local_exceptions[local_ex]
- myDB = db.DBConnection("cache.db")
changed_exceptions = False
# write all the exceptions we got off the net into the database
+ with db.DBConnection('cache.db') as myDB:
for cur_indexer_id in exception_dict:
# get a list of the existing exceptions for this ID

@@ -217,8 +216,7 @@ def update_scene_exceptions(indexer_id, scene_exceptions):
global exceptionIndexerCache
- myDB = db.DBConnection("cache.db")
+ with db.DBConnection('cache.db') as myDB:
myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])
logger.log(u"Updating internal scene name cache", logger.MESSAGE)

@@ -237,9 +235,8 @@ def _retrieve_anidb_mainnames():
anidb_mainNames = {}
- cacheDB = db.DBConnection('cache.db')
- rows = cacheDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
+ with db.DBConnection('cache.db') as myDB:
+ rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['anidb'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_ANIDB_AGE_SECS)

@@ -260,7 +257,7 @@ def _retrieve_anidb_mainnames():
anidb_mainNames[show.indexerid] = [{anime.name: -1}]
if success:
- cacheDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
+ myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['anidb', time.time()])
return anidb_mainNames

@@ -271,9 +268,8 @@ def _xem_excpetions_fetcher(indexer):
exception_dict = {}
- cacheDB = db.DBConnection('cache.db')
- rows = cacheDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
+ with db.DBConnection('cache.db') as myDB:
+ rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['xem'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)

@@ -291,7 +287,7 @@ def _xem_excpetions_fetcher(indexer):
if url_data['result'] == 'failure':
return exception_dict
- cacheDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
+ myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['xem', time.time()])
for indexerid, names in url_data['data'].items():

@@ -303,7 +299,7 @@ def getSceneSeasons(indexer_id):
def getSceneSeasons(indexer_id):
"""get a list of season numbers that have scene excpetions
"""
- myDB = db.DBConnection("cache.db")
+ with db.DBConnection('cache.db') as myDB:
seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
return [cur_exception["season"] for cur_exception in seasons]
@@ -82,11 +82,11 @@ def find_scene_numbering(indexer_id, indexer, season, episode):
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0",
[indexer, indexer_id, season, episode])
if rows:
return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"]))

@@ -134,11 +134,11 @@ def find_scene_absolute_numbering(indexer_id, indexer, absolute_number):
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and absolute_number = ? and scene_absolute_number != 0",
[indexer, indexer_id, absolute_number])
if rows:
return int(rows[0]["scene_absolute_number"])

@@ -154,11 +154,11 @@ def get_indexer_numbering(indexer_id, indexer, sceneSeason, sceneEpisode, fallba
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT season, episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?",
[indexer, indexer_id, sceneSeason, sceneEpisode])
if rows:
return (int(rows[0]["season"]), int(rows[0]["episode"]))
else:

@@ -178,11 +178,11 @@ def get_indexer_absolute_numbering(indexer_id, indexer, sceneAbsoluteNumber, fal
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number = ?",
[indexer, indexer_id, sceneAbsoluteNumber])
if rows:
return int(rows[0]["absolute_number"])
else:

@@ -203,8 +203,7 @@ def set_scene_numbering(indexer_id, indexer, season=None, episode=None, absolute
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
if season and episode:
myDB.action(
"INSERT OR IGNORE INTO scene_numbering (indexer, indexer_id, season, episode) VALUES (?,?,?,?)",

@@ -241,8 +240,7 @@ def find_xem_numbering(indexer_id, indexer, season, episode):
xem_refresh(indexer_id, indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0",
[indexer, indexer_id, season, episode])

@@ -268,8 +266,7 @@ def find_xem_absolute_numbering(indexer_id, indexer, absolute_number):
xem_refresh(indexer_id, indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and absolute_number = ? and scene_absolute_number != 0",
[indexer, indexer_id, absolute_number])

@@ -295,11 +292,11 @@ def get_indexer_numbering_for_xem(indexer_id, indexer, sceneSeason, sceneEpisode
xem_refresh(indexer_id, indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE indexer = ? and showid = ? and scene_season = ? and scene_episode = ?",
[indexer, indexer_id, sceneSeason, sceneEpisode])
if rows:
return (int(rows[0]["season"]), int(rows[0]["episode"]))

@@ -322,11 +319,11 @@ def get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNum
xem_refresh(indexer_id, indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
"SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ?",
[indexer, indexer_id, sceneAbsoluteNumber])
if rows:
return int(rows[0]["absolute_number"])

@@ -345,8 +342,7 @@ def get_scene_numbering_for_show(indexer_id, indexer):
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT season, episode, scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode',
[indexer, indexer_id])

@@ -377,8 +373,7 @@ def get_xem_numbering_for_show(indexer_id, indexer):
xem_refresh(indexer_id, indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT season, episode, scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode',
[indexer, indexer_id])

@@ -407,8 +402,7 @@ def get_scene_absolute_numbering_for_show(indexer_id, indexer):
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT absolute_number, scene_absolute_number FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_absolute_number != 0 ORDER BY absolute_number',
[indexer, indexer_id])

@@ -437,9 +431,9 @@ def get_xem_absolute_numbering_for_show(indexer_id, indexer):
xem_refresh(indexer_id, indexer)
- myDB = db.DBConnection()
result = {}
+ with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT absolute_number, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ORDER BY absolute_number',
[indexer, indexer_id])

@@ -464,10 +458,10 @@ def xem_refresh(indexer_id, indexer, force=False):
indexer_id = int(indexer_id)
indexer = int(indexer)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?",
[indexer, indexer_id])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
else:

@@ -532,6 +526,7 @@ def xem_refresh(indexer_id, indexer, force=False):
return None
if ql:
+ with db.DBConnection() as myDB:
myDB.mass_action(ql)
# fix xem scene numbering issues

@@ -590,8 +585,7 @@ def fix_xem_numbering(indexer_id, indexer):
# # Get query results
# tmp = get_from_api(url, params=params)['result']
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ?',
[indexer, indexer_id])
@@ -175,7 +175,7 @@ def snatchEpisode(result, endStatus=SNATCHED):
notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
if sql_l:
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
return True

@@ -511,7 +511,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
u"The quality of the season " + bestSeasonNZB.provider.providerType + " is " + Quality.qualityStrings[
seasonQual], logger.DEBUG)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
allEps = [int(x["episode"]) for x in
myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?",
[show.indexerid, season])]
@@ -117,7 +117,7 @@ class BacklogSearcher:
logger.log(u"Retrieving the last check time from the DB", logger.DEBUG)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:

@@ -137,7 +137,7 @@ class BacklogSearcher:
logger.log(u"Seeing if we need anything from " + show.name)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
if show.air_by_date:
sqlResults = myDB.select(
"SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",

@@ -175,7 +175,7 @@ class BacklogSearcher:
logger.log(u"Setting the last backlog in the DB to " + str(when), logger.DEBUG)
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM info")
if len(sqlResults) == 0:
@@ -85,8 +85,8 @@ class ShowUpdater():
stale_should_update = []
stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()
- myDB = db.DBConnection()
# last_update_date <= 90 days, sorted ASC because dates are ordinal
+ with db.DBConnection() as myDB:
sql_result = myDB.select(
"SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? ORDER BY last_update_indexer ASC LIMIT 10;",
[stale_update_date])
@@ -108,7 +108,6 @@ def makeSceneShowSearchStrings(show, season=-1):

def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
- myDB = db.DBConnection()
if show.air_by_date or show.sports:
numseasons = 0

@@ -144,6 +143,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
seasonStrings.append("%d" % ab_number)
else:
+ with db.DBConnection() as myDB:
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
[show.indexerid])

@@ -177,7 +177,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):

def makeSceneSearchString(show, ep_obj):
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
[show.indexerid])
@@ -363,7 +363,7 @@ class QueueItemAdd(ShowQueueItem):
# if they gave a custom status then change all the eps to it
if self.default_status != SKIPPED:
logger.log(u"Setting all episodes to the specified default status: " + str(self.default_status))
- myDB = db.DBConnection()
+ with db.DBConnection() as myDB:
myDB.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ? AND season != 0",
[self.default_status, SKIPPED, self.show.indexerid])
@@ -104,9 +104,10 @@ class SubtitlesFinder():
# - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
# - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
- myDB = db.DBConnection()
today = datetime.date.today().toordinal()
# you have 5 minutes to understand that one. Good luck
+ with db.DBConnection() as myDB:
sqlResults = myDB.select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) AS airdate_daydiff FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) AND ((e.subtitles_searchcount <= 2 AND (? - e.airdate) > 7) OR (e.subtitles_searchcount <= 7 AND (? - e.airdate) <= 7)) AND (e.status IN ('+','.join([str(x) for x in Quality.DOWNLOADED])+') OR (e.status IN ('+','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER])+') AND e.location != ""))', [today, wantedLanguages(True), today, today])
if len(sqlResults) == 0:
logger.log('No subtitles to download', logger.MESSAGE)

@@ -141,7 +142,6 @@ class SubtitlesFinder():
try:
subtitles = epObj.downloadSubtitles()
except:
logger.log(u'Unable to find subtitles', logger.DEBUG)
return
@ -153,8 +153,6 @@ class TVShow(object):
|
||||
|
||||
def getAllEpisodes(self, season=None, has_location=False):
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
sql_selection = "SELECT season, episode, "
|
||||
|
||||
# subselection to detect multi-episodes early, share_location > 0
|
||||
@ -171,6 +169,7 @@ class TVShow(object):
|
||||
# need ORDER episode ASC to rename multi-episodes in order S01E01-02
|
||||
sql_selection = sql_selection + " ORDER BY season ASC, episode ASC"
|
||||
|
||||
with db.DBConnection() as myDB:
|
||||
results = myDB.select(sql_selection)
|
||||
|
||||
ep_list = []
|
||||
@ -202,7 +201,7 @@ class TVShow(object):
|
||||
|
||||
# if we get an anime get the real season and episode
|
||||
if self.is_anime and absolute_number and not season and not episode:
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
sql = "SELECT * FROM tv_episodes WHERE showid = ? and absolute_number = ? and season != 0"
|
||||
sqlResults = myDB.select(sql, [self.indexerid, absolute_number])
|
||||
|
||||
@ -264,10 +263,10 @@ class TVShow(object):
|
||||
|
||||
graceperiod = datetime.timedelta(days=30)
|
||||
|
||||
myDB = db.DBConnection()
|
||||
last_airdate = datetime.date.fromordinal(1)
|
||||
|
||||
# get latest aired episode to compare against today - graceperiod and today + graceperiod
|
||||
with db.DBConnection() as myDB:
|
||||
sql_result = myDB.select(
|
||||
"SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status > '1' ORDER BY airdate DESC LIMIT 1",
|
||||
[cur_indexerid])
|
||||
@ -331,7 +330,7 @@ class TVShow(object):
|
||||
|
||||
logger.log(str(self.indexerid) + u": Writing NFOs for all episodes")
|
||||
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
|
||||
|
||||
for epResult in sqlResults:
|
||||
@ -422,14 +421,14 @@ class TVShow(object):
|
||||
sql_l.append(curEpisode.get_sql())
|
||||
|
||||
if sql_l:
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
myDB.mass_action(sql_l)
|
||||
|
||||
def loadEpisodesFromDB(self):
|
||||
|
||||
logger.log(u"Loading all episodes from the DB")
|
||||
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
sql = "SELECT * FROM tv_episodes WHERE showid = ?"
|
||||
sqlResults = myDB.select(sql, [self.indexerid])
|
||||
|
||||
@ -543,7 +542,7 @@ class TVShow(object):
|
||||
scannedEps[season][episode] = True
|
||||
|
||||
if sql_l:
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
myDB.mass_action(sql_l)
|
||||
|
||||
# Done updating save last update date
|
||||
@ -603,7 +602,7 @@ class TVShow(object):
|
||||
u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
|
||||
logger.DEBUG)
|
||||
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
sql_result = myDB.select(
|
||||
"SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
|
||||
[self.indexerid, self.indexer, airdate])
|
||||
@ -713,7 +712,7 @@ class TVShow(object):
|
||||
sql_l.append(curEp.get_sql())
|
||||
|
||||
if sql_l:
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
myDB.mass_action(sql_l)
|
||||
|
||||
# creating metafiles on the root should be good enough
|
||||
@ -727,8 +726,7 @@ class TVShow(object):
|
||||
|
||||
logger.log(str(self.indexerid) + u": Loading show info from database")
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
with db.DBConnection() as myDB:
|
||||
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE indexer_id = ?", [self.indexerid])
|
||||
|
||||
if len(sqlResults) > 1:
|
||||
@ -808,6 +806,7 @@ class TVShow(object):
|
||||
self.imdbid = sqlResults[0]["imdb_id"]
|
||||
|
||||
# Get IMDb_info from database
|
||||
with db.DBConnection() as myDB:
|
||||
sqlResults = myDB.select("SELECT * FROM imdb_info WHERE indexer_id = ?", [self.indexerid])
|
||||
|
||||
if len(sqlResults) == 0:
|
||||
@ -937,7 +936,7 @@ class TVShow(object):
|
||||
def nextEpisode(self):
|
||||
logger.log(str(self.indexerid) + ": Finding the episode which airs next", logger.DEBUG)
|
||||
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
innerQuery = "SELECT airdate FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status in (?,?) ORDER BY airdate ASC LIMIT 1"
|
||||
innerParams = [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED]
|
||||
query = "SELECT * FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= (" + innerQuery + ") and status in (?,?)"
|
||||
@ -959,14 +958,13 @@ class TVShow(object):
|
||||
|
||||
def deleteShow(self):
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
sql_l = [["DELETE FROM tv_episodes WHERE showid = ?", [self.indexerid]],
|
||||
["DELETE FROM tv_shows WHERE indexer_id = ?", [self.indexerid]],
|
||||
["DELETE FROM imdb_info WHERE indexer_id = ?", [self.indexerid]],
|
||||
["DELETE FROM xem_refresh WHERE indexer_id = ?", [self.indexerid]],
|
||||
["DELETE FROM scene_numbering WHERE indexer_id = ?", [self.indexerid]]]
|
||||
|
||||
with db.DBConnection() as myDB:
|
||||
myDB.mass_action(sql_l)
|
||||
|
||||
# remove self from show list
|
||||
@ -996,7 +994,7 @@ class TVShow(object):
|
||||
# run through all locations from DB, check that they exist
|
||||
logger.log(str(self.indexerid) + u": Loading all episodes with a location from the database")
|
||||
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
|
||||
|
||||
sql_l = []
|
||||
@ -1040,7 +1038,7 @@ class TVShow(object):
|
||||
self.airdateModifyStamp(curEp)
|
||||
|
||||
if sql_l:
|
||||
myDB = db.DBConnection()
|
||||
with db.DBConnection() as myDB:
|
||||
myDB.mass_action(sql_l)
|
||||
|
||||
def airdateModifyStamp(self, ep_obj):
|
||||
@ -1090,9 +1088,11 @@ class TVShow(object):
|
||||
logger.log(str(self.indexerid) + ": Downloading subtitles", logger.DEBUG)
|
||||
|
||||
try:
|
||||
episodes = db.DBConnection().select(
|
||||
with db.DBConnection() as myDB:
|
||||
episodes = myDB.select(
|
||||
"SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE '' ORDER BY season DESC, episode DESC",
|
||||
[self.indexerid])
|
||||
|
||||
for episodeLoc in episodes:
|
||||
episode = self.makeEpFromFile(episodeLoc['location'])
|
||||
subtitles = episode.downloadSubtitles(force=force)
|
||||
@ -1104,8 +1104,6 @@ class TVShow(object):
|
||||
def saveToDB(self):
|
||||
logger.log(str(self.indexerid) + u": Saving show info to database", logger.DEBUG)
|
||||
|
||||
myDB = db.DBConnection()
|
||||
|
||||
controlValueDict = {"indexer_id": self.indexerid}
|
||||
newValueDict = {"indexer": self.indexer,
|
||||
"show_name": self.name,
|
||||
@ -1133,13 +1131,17 @@ class TVShow(object):
|
||||
"rls_ignore_words": self.rls_ignore_words,
|
||||
"rls_require_words": self.rls_require_words
|
||||
}
|
||||
|
||||
with db.DBConnection() as myDB:
|
||||
myDB.upsert("tv_shows", newValueDict, controlValueDict)
|
||||
|
||||
helpers.update_anime_support()
|
||||
|
||||
if self.imdbid:
|
||||
controlValueDict = {"indexer_id": self.indexerid}
|
||||
newValueDict = self.imdb_info
|
||||
|
||||
with db.DBConnection() as myDB:
|
||||
myDB.upsert("imdb_info", newValueDict, controlValueDict)
|
||||
|
||||
def __str__(self):
@ -1180,7 +1182,7 @@ class TVShow(object):
logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG)
return False
myDB = db.DBConnection()
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[self.indexerid, season, episode])

@ -1472,7 +1474,7 @@ class TVEpisode(object):
str(self.show.indexerid) + u": Loading episode details from DB for episode " + str(season) + "x" + str(
episode), logger.DEBUG)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[self.show.indexerid, season, episode])

@ -1825,7 +1827,7 @@ class TVEpisode(object):
# delete myself from the DB
logger.log(u"Deleting myself from the database", logger.DEBUG)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
sql = "DELETE FROM tv_episodes WHERE showid=" + str(self.show.indexerid) + " AND season=" + str(
self.season) + " AND episode=" + str(self.episode)
myDB.action(sql)
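The commit only wraps this delete in a with-block; the WHERE clause is still assembled by string concatenation. For comparison, the same statement with bound parameters (an alternative, not what the patch does) would be:

    # parameterized form of the same delete; the values are bound by sqlite
    # rather than concatenated into the SQL string
    myDB.action("DELETE FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
                [self.show.indexerid, self.season, self.episode])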
@ -1844,8 +1846,7 @@ class TVEpisode(object):
logger.log(str(self.show.indexerid) + u": Not creating SQL queue - record is not dirty", logger.DEBUG)
return
myDB = db.DBConnection()
with db.DBConnection() as myDB:
rows = myDB.select(
'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
[self.show.indexerid, self.season, self.episode])

@ -1894,8 +1895,6 @@ class TVEpisode(object):
logger.log(u"STATUS IS " + str(self.status), logger.DEBUG)
myDB = db.DBConnection()
newValueDict = {"indexerid": self.indexerid,
"indexer": self.indexer,
"name": self.name,

@ -1918,6 +1917,7 @@ class TVEpisode(object):
"episode": self.episode}
# use a custom update/insert method to get the data into the DB
with db.DBConnection() as myDB:
myDB.upsert("tv_episodes", newValueDict, controlValueDict)
def fullPath(self):

@ -2378,11 +2378,9 @@ class TVEpisode(object):
# save any changes to the database
sql_l = []
with self.lock:
sql_l.append(self.get_sql())
for relEp in self.relatedEps:
for relEp in [self] + self.relatedEps:
sql_l.append(relEp.get_sql())
if sql_l:
myDB = db.DBConnection()
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
@ -49,18 +49,19 @@ class CacheDBConnection(db.DBConnection):
def __init__(self, providerName):
db.DBConnection.__init__(self, "cache.db")
with self as myDB:
# Create the table if it's not already there
try:
if not self.hasTable(providerName):
self.action("CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT)")
if not myDB.hasTable(providerName):
myDB.action("CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT)")
except Exception, e:
if str(e) != "table [" + providerName + "] already exists":
raise
# Create the table if it's not already there
try:
if not self.hasTable('lastUpdate'):
self.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")
if not myDB.hasTable('lastUpdate'):
myDB.action("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)")
except Exception, e:
if str(e) != "table lastUpdate already exists":
raise
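Because CacheDBConnection inherits from db.DBConnection, the instance can act as its own context manager: "with self as myDB:" calls the inherited __enter__ (which returns the object) and __exit__ (which closes the connection) around the table-creation code. A small, self-contained illustration of that pattern using a hypothetical class:

    class SelfManaged(object):
        def __enter__(self):
            return self                  # the with-block gets the instance back

        def __exit__(self, exc_type, exc_val, exc_tb):
            self.closed = True           # stand-in for closing a connection

        def setup(self):
            # a method can wrap part of its own work in a with-block on self
            with self as managed:
                assert managed is self   # same object, just context-managed

    SelfManaged().setup()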
@ -276,9 +277,9 @@ class TVCache():
season = episodes = None
if parse_result.air_by_date or parse_result.sports:
myDB = db.DBConnection()
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
with db.DBConnection() as myDB:
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
[parse_result.show.indexerid, parse_result.show.indexer, airdate])
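The air-by-date lookup keys episodes on the date's proleptic Gregorian ordinal, so the airdate comparison in the SELECT is a plain integer match. For example:

    import datetime

    airdate = datetime.date(2014, 5, 26).toordinal()     # 735379
    # the cache row stores this integer, and the SELECT above matches on it directly
    assert datetime.date.fromordinal(airdate) == datetime.date(2014, 5, 26)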
@ -283,12 +283,11 @@ class Manage:
@cherrypy.expose
def showEpisodeStatuses(self, indexer_id, whichStatus):
myDB = db.DBConnection()
status_list = [int(whichStatus)]
if status_list[0] == SNATCHED:
status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER
with db.DBConnection() as myDB:
cur_show_results = myDB.select(
"SELECT season, episode, name FROM tv_episodes WHERE showid = ? AND season != 0 AND status IN (" + ','.join(
['?'] * len(status_list)) + ")", [int(indexer_id)] + status_list)

@ -324,7 +323,7 @@ class Manage:
if not status_list:
return _munge(t)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
status_results = myDB.select(
"SELECT show_name, tv_shows.indexer_id as indexer_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN (" + ','.join(
['?'] * len(

@ -372,8 +371,7 @@ class Manage:
to_change[indexer_id].append(what)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
for cur_indexer_id in to_change:
# get a list of all the eps we want to change if they just said "all"

@ -389,8 +387,7 @@ class Manage:
@cherrypy.expose
def showSubtitleMissed(self, indexer_id, whichSubs):
myDB = db.DBConnection()
with db.DBConnection() as myDB:
cur_show_results = myDB.select(
"SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? AND season != 0 AND status LIKE '%4'",
[int(indexer_id)])
@ -431,7 +428,7 @@ class Manage:
if not whichSubs:
return _munge(t)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
status_results = myDB.select(
"SELECT show_name, tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles FROM tv_episodes, tv_shows WHERE tv_shows.subtitles = 1 AND tv_episodes.status LIKE '%4' AND tv_episodes.season != 0 AND tv_episodes.showid = tv_shows.indexer_id ORDER BY show_name")

@ -482,7 +479,7 @@ class Manage:
for cur_indexer_id in to_download:
# get a list of all the eps we want to download subtitles if they just said "all"
if 'all' in to_download[cur_indexer_id]:
myDB = db.DBConnection()
with db.DBConnection() as myDB:
all_eps_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE status LIKE '%4' AND season != 0 AND showid = ?",
[cur_indexer_id])

@ -512,12 +509,11 @@ class Manage:
t = PageTemplate(file="manage_backlogOverview.tmpl")
t.submenu = ManageMenu()
myDB = db.DBConnection()
showCounts = {}
showCats = {}
showSQLResults = {}
with db.DBConnection() as myDB:
for curShow in sickbeard.showList:
epCounts = {}

@ -859,7 +855,7 @@ class Manage:
@cherrypy.expose
def failedDownloads(self, limit=100, toRemove=None):
myDB = db.DBConnection("failed.db")
with db.DBConnection('failed.db') as myDB:
if limit == "0":
sqlResults = myDB.select("SELECT * FROM failed")

@ -886,9 +882,8 @@ class History:
@cherrypy.expose
def index(self, limit=100):
myDB = db.DBConnection()
# sqlResults = myDB.select("SELECT h.*, show_name, name FROM history h, tv_shows s, tv_episodes e WHERE h.showid=s.indexer_id AND h.showid=e.showid AND h.season=e.season AND h.episode=e.episode ORDER BY date DESC LIMIT "+str(numPerPage*(p-1))+", "+str(numPerPage))
with db.DBConnection() as myDB:
if limit == "0":
sqlResults = myDB.select(
"SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC")

@ -959,8 +954,9 @@ class History:
@cherrypy.expose
def clearHistory(self):
myDB = db.DBConnection()
with db.DBConnection() as myDB:
myDB.action("DELETE FROM history WHERE 1=1")
ui.notifications.message('History cleared')
redirect("/history/")

@ -968,9 +964,10 @@ class History:
@cherrypy.expose
def trimHistory(self):
myDB = db.DBConnection()
with db.DBConnection() as myDB:
myDB.action("DELETE FROM history WHERE date < " + str(
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(history.dateFormat)))
ui.notifications.message('Removed history entries greater than 30 days old')
redirect("/history/")
@ -1440,7 +1437,6 @@ class ConfigProviders:
return providerDict[name].getID() + '|' + providerDict[name].configStr()
else:
newProvider = newznab.NewznabProvider(name, url, key=key)
sickbeard.newznabProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.configStr()

@ -2231,8 +2227,6 @@ class NewHomeAddShows:
t = PageTemplate(file="home_massAddTable.tmpl")
t.submenu = HomeMenu()
myDB = db.DBConnection()
if not rootDir:
return "No folders selected."
elif type(rootDir) != list:

@ -2255,6 +2249,7 @@ class NewHomeAddShows:
dir_list = []
with db.DBConnection() as myDB:
for root_dir in root_dirs:
try:
file_list = ek.ek(os.listdir, root_dir)

@ -2632,7 +2627,7 @@ class Home:
if 'callback' in kwargs and '_' in kwargs:
callback, _ = kwargs['callback'], kwargs['_']
else:
return "Error: Unsupported Request. Send jsonp request with 'callback' variable in the query stiring."
return "Error: Unsupported Request. Send jsonp request with 'callback' variable in the query string."
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
cherrypy.response.headers['Content-Type'] = 'text/javascript'
cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'

@ -2877,8 +2872,9 @@ class Home:
def loadShowNotifyLists(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
mydb = db.DBConnection()
rows = mydb.select("SELECT show_id, show_name, notify_list FROM tv_shows ORDER BY show_name ASC")
with db.DBConnection() as myDB:
rows = myDB.select("SELECT show_id, show_name, notify_list FROM tv_shows ORDER BY show_name ASC")
data = {}
size = 0
for r in rows:

@ -2995,8 +2991,7 @@ class Home:
showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.indexerid)
myDB = db.DBConnection()
with db.DBConnection() as myDB:
seasonResults = myDB.select(
"SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season desc",
[showObj.indexerid]

@ -3115,7 +3110,8 @@ class Home:
@cherrypy.expose
def plotDetails(self, show, season, episode):
result = db.DBConnection().action(
with db.DBConnection() as myDB:
result = myDB.action(
"SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
(int(show), int(season), int(episode))).fetchone()
return result['description'] if result else 'Episode not found.'
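Since the connection now closes when the with-block exits, results have to be fetched inside the block; here fetchone() runs before __exit__, and the sqlite3.Row it returns keeps its data after the close. A standalone demonstration of that behaviour:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    conn.execute("CREATE TABLE t (description TEXT)")
    conn.execute("INSERT INTO t VALUES ('pilot synopsis')")

    row = conn.execute("SELECT description FROM t").fetchone()
    conn.close()

    # the Row was materialized before close(), so reading it afterwards still works
    assert row['description'] == 'pilot synopsis'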
@ -3578,7 +3574,7 @@ class Home:
sql_l.append(epObj.get_sql())
if sql_l:
myDB = db.DBConnection()
with db.DBConnection() as myDB:
myDB.mass_action(sql_l)
if int(status) == WANTED:

@ -3678,11 +3674,10 @@ class Home:
except exceptions.ShowDirNotFoundException:
return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
myDB = db.DBConnection()
if eps is None:
redirect("/home/displayShow?show=" + show)
with db.DBConnection() as myDB:
for curEp in eps.split('|'):
epInfo = curEp.split('x')

@ -4000,8 +3995,6 @@ class WebInterface:
@cherrypy.expose
def comingEpisodes(self, layout="None"):
myDB = db.DBConnection()
today1 = datetime.date.today()
today = today1.toordinal()
next_week1 = (datetime.date.today() + datetime.timedelta(days=7))

@ -4010,9 +4003,12 @@ class WebInterface:
done_show_list = []
qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED]
with db.DBConnection() as myDB:
sql_results = myDB.select(
"SELECT *, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_episodes.status NOT IN (" + ','.join(
['?'] * len(qualList)) + ")", [today, next_week] + qualList)
for cur_result in sql_results:
done_show_list.append(int(cur_result["showid"]))
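The status filter is built by joining one '?' placeholder per entry in qualList, so the values still travel as bound parameters rather than being formatted into the SQL. The same trick in isolation:

    qual_list = [4, 6, 7]   # illustrative status codes, not the real Quality constants
    placeholders = ','.join(['?'] * len(qual_list))       # -> "?,?,?"
    sql = "SELECT * FROM tv_episodes WHERE status NOT IN (" + placeholders + ")"
    # the values are then handed over as bound parameters, one per placeholder:
    #   myDB.select(sql, qual_list)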
@ -4099,14 +4095,12 @@ class WebInterface:
ical += 'X-WR-CALDESC:SickRage\r\n'
ical += 'PRODID://Sick-Beard Upcoming Episodes//\r\n'
# Get shows info
myDB = db.DBConnection()
# Limit dates
past_date = (datetime.date.today() + datetime.timedelta(weeks=-52)).toordinal()
future_date = (datetime.date.today() + datetime.timedelta(weeks=52)).toordinal()
# Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
with db.DBConnection() as myDB:
calendar_shows = myDB.select(
"SELECT show_name, indexer_id, network, airs, runtime FROM tv_shows WHERE ( status = 'Continuing' OR status = 'Returning Series' ) AND paused != '1'")
for show in calendar_shows: