2014-03-10 01:18:05 -04:00
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import datetime
import sickbeard
import os . path
from sickbeard import db , common , helpers , logger
from sickbeard import encodingKludge as ek
from sickbeard . name_parser . parser import NameParser , InvalidNameException
2014-03-25 01:57:24 -04:00
MIN_DB_VERSION = 9  # oldest db version we support migrating from
MAX_DB_VERSION = 29  # newest db version this code knows how to reach
2014-03-10 01:18:05 -04:00
2014-03-25 01:57:24 -04:00
class MainSanityCheck(db.DBSanityCheck):
    """Startup integrity checks for the main database.

    Each fix_* method is idempotent: it detects one specific kind of bad
    data (duplicate rows, orphaned episodes, missing indexes) and repairs
    it in place.
    """

    def check(self):
        # Create missing indexes first; the duplicate/orphan scans below
        # benefit from them.
        self.fix_missing_table_indexes()
        self.fix_duplicate_shows()
        self.fix_duplicate_episodes()
        self.fix_orphan_episodes()

    def fix_duplicate_shows(self):
        """Remove all but one tv_shows row for each duplicated indexer_id."""
        sqlResults = self.connection.select(
            "SELECT show_id, indexer_id, COUNT(indexer_id) as count FROM tv_shows GROUP BY indexer_id HAVING count > 1")

        # BUG FIX: this used to be a for/else; a for-loop's else branch runs
        # whenever the loop finishes without break, so "check passed" was
        # logged even after duplicates were found and deleted. Use a plain
        # if/else on the result set instead.
        if sqlResults:
            for cur_duplicate in sqlResults:
                logger.log(u"Duplicate show detected! indexer_id: " + str(cur_duplicate["indexer_id"]) + u" count: " + str(
                    cur_duplicate["count"]), logger.DEBUG)

                # Keep one row: fetch and delete count-1 of the duplicates.
                cur_dupe_results = self.connection.select(
                    "SELECT show_id, indexer_id FROM tv_shows WHERE indexer_id = ? LIMIT ?",
                    [cur_duplicate["indexer_id"], int(cur_duplicate["count"]) - 1]
                )

                for cur_dupe_id in cur_dupe_results:
                    logger.log(
                        u"Deleting duplicate show with indexer_id: " + str(cur_dupe_id["indexer_id"]) + u" show_id: " + str(
                            cur_dupe_id["show_id"]))
                    self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
        else:
            logger.log(u"No duplicate show, check passed")

    def fix_duplicate_episodes(self):
        """Remove duplicate tv_episodes rows per (showid, season, episode)."""
        sqlResults = self.connection.select(
            "SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")

        # Same for/else bug fix as in fix_duplicate_shows.
        if sqlResults:
            for cur_duplicate in sqlResults:
                logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: " + str(
                    cur_duplicate["season"]) + u" episode: " + str(cur_duplicate["episode"]) + u" count: " + str(
                    cur_duplicate["count"]), logger.DEBUG)

                # ORDER BY episode_id DESC + LIMIT count-1 selects the newer
                # rows for deletion, keeping the row with the lowest id.
                cur_dupe_results = self.connection.select(
                    "SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
                    [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"],
                     int(cur_duplicate["count"]) - 1]
                )

                for cur_dupe_id in cur_dupe_results:
                    logger.log(u"Deleting duplicate episode with episode_id: " + str(cur_dupe_id["episode_id"]))
                    self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_dupe_id["episode_id"]])
        else:
            logger.log(u"No duplicate episode, check passed")

    def fix_orphan_episodes(self):
        """Delete tv_episodes rows whose showid has no matching tv_shows row."""
        sqlResults = self.connection.select(
            "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL")

        # Same for/else bug fix as in fix_duplicate_shows.
        if sqlResults:
            for cur_orphan in sqlResults:
                logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(
                    cur_orphan["showid"]), logger.DEBUG)
                logger.log(u"Deleting orphan episode with episode_id: " + str(cur_orphan["episode_id"]))
                self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
        else:
            logger.log(u"No orphan episodes, check passed")

    def fix_missing_table_indexes(self):
        """Recreate any lookup indexes that are missing from older databases."""
        if not self.connection.select("PRAGMA index_info('idx_indexer_id')"):
            logger.log(u"Missing idx_indexer_id for TV Shows table detected!, fixing...")
            self.connection.action("CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);")

        if not self.connection.select("PRAGMA index_info('idx_tv_episodes_showid_airdate')"):
            logger.log(u"Missing idx_tv_episodes_showid_airdate for TV Episodes table detected!, fixing...")
            self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);")

        if not self.connection.select("PRAGMA index_info('idx_showid')"):
            logger.log(u"Missing idx_showid for TV Episodes table detected!, fixing...")
            self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")

        if not self.connection.select("PRAGMA index_info('idx_status')"):
            logger.log(u"Missing idx_status for TV Episodes table detected!, fixing...")
            self.connection.action("CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)")

        if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"):
            logger.log(u"Missing idx_sta_epi_air for TV Episodes table detected!, fixing...")
            self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)")

        if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"):
            logger.log(u"Missing idx_sta_epi_sta_air for TV Episodes table detected!, fixing...")
            self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)")
2014-03-10 01:18:05 -04:00
def backupDatabase(version):
    """Snapshot the database file before a schema migration.

    Aborts the application (via logger.log_error_and_exit) when the backup
    cannot be written, so a failed migration never destroys the only copy
    of the data.
    """
    logger.log(u"Backing up database before upgrade")
    backed_up = helpers.backupVersionedFile(db.dbFilename(), version)
    if backed_up:
        logger.log(u"Proceeding with upgrade")
    else:
        logger.log_error_and_exit(u"Database backup failed, abort upgrading database")
2014-03-25 01:57:24 -04:00
2014-03-10 01:18:05 -04:00
# ======================
# = Main DB Migrations =
# ======================
# Add new migrations at the bottom of the list; subclass the previous migration.
2014-03-25 01:57:24 -04:00
class InitialSchema(db.SchemaUpgrade):
    """Create the baseline (version 18) schema on a brand-new database.

    On an existing database this instead validates that the stored version
    falls inside the supported [MIN_DB_VERSION, MAX_DB_VERSION] window and
    aborts the application otherwise.
    """

    def test(self):
        # The schema exists once the db_version table has been created.
        return self.hasTable("db_version")

    def execute(self):
        if not self.hasTable("tv_shows") and not self.hasTable("db_version"):
            # Fresh install: create all tables/indexes at schema version 18.
            queries = [
                "CREATE TABLE db_version (db_version INTEGER);",
                "CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT);",
                "CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)",
                "CREATE TABLE info (last_backlog NUMERIC, last_tvdb NUMERIC, last_proper_search NUMERIC);",
                "CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, tvdbid NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)",
                "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_tvdb NUMERIC)",
                "CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);",
                "CREATE INDEX idx_showid ON tv_episodes (showid);",
                "CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);",
                # Later migrations (18+) take the schema the rest of the way.
                "INSERT INTO db_version (db_version) VALUES (18);"
            ]

            for query in queries:
                self.connection.action(query)
        else:
            cur_db_version = self.checkDBVersion()

            # Too old to migrate from: user must go through an intermediate release.
            if cur_db_version < MIN_DB_VERSION:
                logger.log_error_and_exit(u"Your database version (" + str(
                    cur_db_version) + ") is too old to migrate from what this version of Sick Beard supports (" + \
                                          str(MIN_DB_VERSION) + ").\n" + \
                                          "Upgrade using a previous version (tag) build 496 to build 501 of Sick Beard first or remove database file to begin fresh."
                                          )

            # Newer than we understand: likely touched by a fork; refuse to run.
            if cur_db_version > MAX_DB_VERSION:
                logger.log_error_and_exit(u"Your database version (" + str(
                    cur_db_version) + ") has been incremented past what this version of Sick Beard supports (" + \
                                          str(MAX_DB_VERSION) + ").\n" + \
                                          "If you have used other forks of Sick Beard, your database may be unusable due to their modifications."
                                          )
2014-03-10 01:18:05 -04:00
class AddSizeAndSceneNameFields(InitialSchema):
    """Migration to v11: add tv_episodes.file_size and release_name, then backfill both."""

    def test(self):
        return self.checkDBVersion() >= 10

    def execute(self):
        backupDatabase(11)

        if not self.hasColumn("tv_episodes", "file_size"):
            self.addColumn("tv_episodes", "file_size")

        if not self.hasColumn("tv_episodes", "release_name"):
            self.addColumn("tv_episodes", "release_name", "TEXT", "")

        ep_results = self.connection.select("SELECT episode_id, location, file_size FROM tv_episodes")

        logger.log(u"Adding file size to all episodes in DB, please be patient")
        for cur_ep in ep_results:
            if not cur_ep["location"]:
                continue

            # if there is no size yet then populate it for us
            if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]):
                cur_size = ek.ek(os.path.getsize, cur_ep["location"])
                self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?",
                                       [cur_size, int(cur_ep["episode_id"])])

        # check each snatch to see if we can use it to get a release name from
        history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")

        logger.log(u"Adding release name to all episodes still in history")
        for cur_result in history_results:
            # find the associated download, if there isn't one then ignore it
            download_results = self.connection.select(
                "SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
                [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]])
            if not download_results:
                logger.log(u"Found a snatch in the history for " + cur_result[
                    "resource"] + " but couldn't find the associated download, skipping it", logger.DEBUG)
                continue

            nzb_name = cur_result["resource"]
            file_name = ek.ek(os.path.basename, download_results[0]["resource"])

            # take the extension off the filename, it's not needed
            if '.' in file_name:
                file_name = file_name.rpartition('.')[0]

            # find the associated episode on disk
            ep_results = self.connection.select(
                "SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
                [cur_result["showid"], cur_result["season"], cur_result["episode"]])
            if not ep_results:
                logger.log(
                    u"The episode " + nzb_name + " was found in history but doesn't exist on disk anymore, skipping",
                    logger.DEBUG)
                continue

            # get the status/quality of the existing ep and make sure it's what we expect
            ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"]))
            if ep_status != common.DOWNLOADED:
                continue

            if ep_quality != int(cur_result["quality"]):
                continue

            # make sure this is actually a real release name and not a season pack or something
            for cur_name in (nzb_name, file_name):
                logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
                try:
                    np = NameParser(False)
                    parse_result = np.parse(cur_name)
                except InvalidNameException:
                    continue

                # FIX: compare against None with "is not" (PEP 8); an explicit
                # None test is required here because season 0 is falsy but valid.
                if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
                    # if all is well by this point we'll just put the release name into the database
                    self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                                           [cur_name, ep_results[0]["episode_id"]])
                    break

        # for episodes still lacking a release name, see if the on-disk
        # filename itself is an obvious scene name we can use
        empty_results = self.connection.select("SELECT episode_id, location FROM tv_episodes WHERE release_name = ''")

        logger.log(u"Adding release name to all episodes with obvious scene filenames")
        for cur_result in empty_results:
            ep_file_name = ek.ek(os.path.basename, cur_result["location"])
            ep_file_name = os.path.splitext(ep_file_name)[0]

            # only want to find real scene names here so anything with a space in it is out
            if ' ' in ep_file_name:
                continue

            try:
                np = NameParser(False)
                parse_result = np.parse(ep_file_name)
            except InvalidNameException:
                continue

            if not parse_result.release_group:
                continue

            logger.log(
                u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid",
                logger.DEBUG)
            self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
                                   [ep_file_name, cur_result["episode_id"]])

        self.incDBVersion()
2014-03-25 01:57:24 -04:00
class RenameSeasonFolders(AddSizeAndSceneNameFields):
    """Migration to v12: rename tv_shows.seasonfolders to flatten_folders, inverting its meaning."""

    def test(self):
        return self.checkDBVersion() >= 11

    def execute(self):
        # rename the column; SQLite of this era can't rename a column in
        # place, so rebuild the table and copy the data across
        self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
        self.connection.action(
            "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)")
        sql = "INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows"
        self.connection.action(sql)

        # flip the values to be opposite of what they were before:
        # flatten_folders is the logical inverse of seasonfolders. The
        # temporary value 2 keeps the two passes from clobbering each other.
        self.connection.action("UPDATE tv_shows SET flatten_folders = 2 WHERE flatten_folders = 1")
        self.connection.action("UPDATE tv_shows SET flatten_folders = 1 WHERE flatten_folders = 0")
        self.connection.action("UPDATE tv_shows SET flatten_folders = 0 WHERE flatten_folders = 2")
        self.connection.action("DROP TABLE tmp_tv_shows")

        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-03-10 01:18:05 -04:00
class AddSubtitlesSupport(RenameSeasonFolders):
    """Migration to v13: add subtitle-tracking columns to tv_shows and tv_episodes."""

    def test(self):
        return self.checkDBVersion() >= 12

    def execute(self):
        # (table, column[, type, default]) specs forwarded to addColumn.
        new_columns = (
            ("tv_shows", "subtitles"),
            ("tv_episodes", "subtitles", "TEXT", ""),
            ("tv_episodes", "subtitles_searchcount"),
            ("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min)),
        )
        for spec in new_columns:
            self.addColumn(*spec)
        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-03-10 01:18:05 -04:00
class AddIMDbInfo(RenameSeasonFolders):
    """Migration to v14: create the imdb_info table for cached IMDb metadata."""

    def test(self):
        return self.checkDBVersion() >= 13

    def execute(self):
        create_sql = (
            "CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)"
        )
        self.connection.action(create_sql)
        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-03-10 01:18:05 -04:00
class Add1080pAndRawHDQualities(AddIMDbInfo):
    """Add support for 1080p related qualities along with RawHD

    Quick overview of what the upgrade needs to do:

           quality   | old  | new
        --------------------------
        hdwebdl      | 1<<3 | 1<<5
        hdbluray     | 1<<4 | 1<<7
        fullhdbluray | 1<<5 | 1<<8
        --------------------------
        rawhdtv      |      | 1<<3
        fullhdtv     |      | 1<<4
        fullhdwebdl  |      | 1<<6
    """

    def test(self):
        return self.checkDBVersion() >= 14

    def _update_status(self, old_status):
        """Re-encode a composite status so its quality bits use the new layout."""
        (status, quality) = common.Quality.splitCompositeStatus(old_status)
        return common.Quality.compositeStatus(status, self._update_quality(quality))

    def _update_quality(self, old_quality):
        """Update bitwise flags to reflect new quality values

        Check flag bits (clear old then set their new locations) starting
        with the highest bits so we dont overwrite data we need later on
        """
        result = old_quality

        # move fullhdbluray from 1<<5 to 1<<8 if set
        if result & (1 << 5):
            result &= ~(1 << 5)
            result |= 1 << 8

        # move hdbluray from 1<<4 to 1<<7 if set
        if result & (1 << 4):
            result &= ~(1 << 4)
            result |= 1 << 7

        # move hdwebdl from 1<<3 to 1<<5 if set
        if result & (1 << 3):
            result &= ~(1 << 3)
            result |= 1 << 5

        return result

    def _update_composite_qualities(self, status):
        """Unpack, Update, Return new quality values

        Unpack the composite archive/initial values.
        Update either qualities if needed.
        Then return the new composite quality value.
        """
        # high 16 bits = archive ("best") quality, low 16 bits = initial quality
        best = (status & (0xffff << 16)) >> 16
        initial = status & 0xffff

        best = self._update_quality(best)
        initial = self._update_quality(initial)

        return (best << 16) | initial

    def execute(self):
        backupDatabase(self.checkDBVersion())

        # update the default quality so we dont grab the wrong qualities after migration
        sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT)
        sickbeard.save_config()

        # upgrade previous HD to HD720p -- shift previous qualities to new placevalues
        old_hd = common.Quality.combineQualities(
            [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
        new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY],
                                                 [])

        # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
        old_any = common.Quality.combineQualities(
            [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
             common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
        new_any = common.Quality.combineQualities(
            [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
             common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY,
             common.Quality.UNKNOWN], [])

        # update qualities (including templates)
        logger.log(u"[1/4] Updating pre-defined templates and the quality for each show...", logger.MESSAGE)
        ql = []
        shows = self.connection.select("SELECT * FROM tv_shows")
        for cur_show in shows:
            if cur_show["quality"] == old_hd:
                new_quality = new_hd
            elif cur_show["quality"] == old_any:
                new_quality = new_any
            else:
                new_quality = self._update_composite_qualities(cur_show["quality"])
            ql.append(["UPDATE tv_shows SET quality = ? WHERE show_id = ?", [new_quality, cur_show["show_id"]]])
        self.connection.mass_action(ql)

        # update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
        logger.log(u"[2/4] Updating the status for the episodes within each show...", logger.MESSAGE)
        ql = []
        episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800")
        for cur_episode in episodes:
            ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
                       [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
        self.connection.mass_action(ql)

        # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together

        # update previous history so it shows the correct action
        logger.log(u"[3/4] Updating history to reflect the correct action...", logger.MESSAGE)
        ql = []
        historyAction = self.connection.select("SELECT * FROM history WHERE action < 3276800 AND action >= 800")
        for cur_entry in historyAction:
            ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?",
                       [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
        self.connection.mass_action(ql)

        # update previous history so it shows the correct quality
        logger.log(u"[4/4] Updating history to reflect the correct quality...", logger.MESSAGE)
        ql = []
        historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
        for cur_entry in historyQuality:
            ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?",
                       [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
        self.connection.mass_action(ql)

        self.incDBVersion()

        # cleanup and reduce db if any previous data was removed
        logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
        self.connection.action("VACUUM")
2014-03-25 01:57:24 -04:00
2014-03-10 01:18:05 -04:00
class AddProperNamingSupport(Add1080pAndRawHDQualities):
    """Migration to v16: flag episodes whose file came from a proper release."""

    def test(self):
        return self.checkDBVersion() >= 15

    def execute(self):
        table, column = "tv_episodes", "is_proper"
        self.addColumn(table, column)
        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-03-10 01:18:05 -04:00
class AddEmailSubscriptionTable(AddProperNamingSupport):
    """Migration to v17: per-show notification email list (tv_shows.notify_list)."""

    def test(self):
        # NOTE(review): unlike the surrounding migrations this tests for the
        # column itself rather than a version number -- presumably so the
        # migration re-runs on databases where the version was bumped without
        # the column being added; confirm against upgrade history.
        return self.hasColumn("tv_shows", "notify_list")

    def execute(self):
        self.addColumn('tv_shows', 'notify_list', 'TEXT', None)
        self.incDBVersion()
2014-03-11 12:35:13 -04:00
2014-03-25 01:57:24 -04:00
2014-03-11 14:23:20 -04:00
class AddShowidTvdbidIndex(AddEmailSubscriptionTable):
    """Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries"""

    def test(self):
        return self.checkDBVersion() >= 17

    def execute(self):
        backupDatabase(17)

        logger.log(u"Check for duplicate shows before adding unique index.")
        # NOTE(review): fix_duplicate_shows() queries tv_shows.indexer_id,
        # but at this schema version the column is still named tvdb_id (it is
        # renamed by ConvertTVShowsToIndexerScheme) -- verify this call works
        # on pre-v22 databases.
        MainSanityCheck(self.connection).fix_duplicate_shows()

        logger.log(u"Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries.")
        # NOTE(review): hasTable() is being used here to probe for *indexes*;
        # that only works if it matches any sqlite_master entry rather than
        # only type='table' rows -- confirm against db.DBConnection.hasTable.
        if not self.hasTable("idx_showid"):
            self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")
        if not self.hasTable("idx_tvdb_id"):
            self.connection.action("CREATE UNIQUE INDEX idx_tvdb_id ON tv_shows (tvdb_id);")

        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-04-22 19:09:29 -04:00
class AddUpdateTVDB(AddShowidTvdbidIndex):
    """Adding column last_update_tvdb to tv_shows for controlling nightly updates"""

    def test(self):
        return self.checkDBVersion() >= 18

    def execute(self):
        backupDatabase(18)

        logger.log(u"Adding column last_update_tvdb to tvshows")
        already_present = self.hasColumn("tv_shows", "last_update_tvdb")
        if not already_present:
            self.addColumn("tv_shows", "last_update_tvdb", default=1)
        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-04-22 19:09:29 -04:00
class AddProperSearch(AddUpdateTVDB):
    """Migration to v20: track when the last proper search ran (info.last_proper_search)."""

    def test(self):
        return self.checkDBVersion() >= 19

    def execute(self):
        backupDatabase(19)

        logger.log(u"Adding column last_proper_search to info")
        already_present = self.hasColumn("info", "last_proper_search")
        if not already_present:
            self.addColumn("info", "last_proper_search", default=1)
        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-04-22 19:09:29 -04:00
class AddDvdOrderOption(AddProperSearch):
    """Migration to v21: per-show option to prefer the DVD episode ordering."""

    def test(self):
        return self.checkDBVersion() >= 20

    def execute(self):
        backupDatabase(20)

        logger.log(u"Adding column dvdorder to tvshows")
        column_spec = ("tv_shows", "dvdorder", "NUMERIC", "0")
        if not self.hasColumn(column_spec[0], column_spec[1]):
            self.addColumn(*column_spec)

        self.incDBVersion()
2014-03-11 12:35:13 -04:00
2014-04-22 19:09:29 -04:00
class ConvertTVShowsToIndexerScheme(AddDvdOrderOption):
    """Migration to v23: rebuild tv_shows with indexer-neutral column names.

    tvdb_id becomes indexer_id, last_update_tvdb becomes last_update_indexer,
    and new indexer/classification columns are added and backfilled.
    """

    def test(self):
        return self.checkDBVersion() >= 22

    def execute(self):
        backupDatabase(22)

        # Re-run the dvdorder addition; presumably for databases that reached
        # this version without AddDvdOrderOption having run -- confirm.
        logger.log(u"Adding column dvdorder to tvshows")
        if not self.hasColumn("tv_shows", "dvdorder"):
            self.addColumn("tv_shows", "dvdorder", "NUMERIC", "0")

        logger.log(u"Converting TV Shows table to Indexer Scheme...")

        # A crash during a previous run can leave the temp table behind.
        if self.hasTable("tmp_tv_shows"):
            logger.log(u"Removing temp tv show tables left behind from previous updates...")
            self.connection.action("DROP TABLE tmp_tv_shows")

        # Rename-out, recreate with the new layout, copy data back, drop temp.
        self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
        self.connection.action(
            "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
        self.connection.action(
            "INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
        self.connection.action("DROP TABLE tmp_tv_shows")

        self.connection.action("CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id);")

        # Backfill the new columns for every existing row (presumably
        # indexer 1 means TVDB -- confirm against the indexer constants).
        self.connection.action("UPDATE tv_shows SET classification = 'Scripted'")
        self.connection.action("UPDATE tv_shows SET indexer = 1")

        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-03-11 15:28:44 -04:00
class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
    """Migration to v24: rebuild tv_episodes with indexer-neutral column names.

    tvdbid becomes indexerid, an indexer column is added and backfilled, and
    the lookup indexes are recreated on the rebuilt table.
    """

    def test(self):
        return self.checkDBVersion() >= 23

    def execute(self):
        backupDatabase(23)

        logger.log(u"Converting TV Episodes table to Indexer Scheme...")

        # A crash during a previous run can leave the temp table behind.
        if self.hasTable("tmp_tv_episodes"):
            logger.log(u"Removing temp tv episode tables left behind from previous updates...")
            self.connection.action("DROP TABLE tmp_tv_episodes")

        # Rename-out, recreate with the new layout, copy data back, drop temp.
        self.connection.action("ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes")
        self.connection.action(
            "CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
        self.connection.action(
            "INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
        self.connection.action("DROP TABLE tmp_tv_episodes")

        # Recreate the indexes dropped with the old table (same set that
        # MainSanityCheck.fix_missing_table_indexes maintains).
        self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);")
        self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")
        self.connection.action("CREATE INDEX idx_status ON tv_episodes (status,season,episode,airdate)")
        self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status,episode, airdate)")
        self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season,episode, status, airdate)")

        # Backfill the new indexer column for every existing row (presumably
        # indexer 1 means TVDB -- confirm against the indexer constants).
        self.connection.action("UPDATE tv_episodes SET indexer = 1")

        self.incDBVersion()
2014-03-25 01:57:24 -04:00
2014-03-11 15:28:44 -04:00
class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):
    """Schema migration 24: rebuild imdb_info so its key column is the
    indexer-neutral indexer_id instead of the TVDB-specific tvdb_id."""

    def test(self):
        # Considered applied once the schema version has reached 24.
        return self.checkDBVersion() >= 24

    def execute(self):
        backupDatabase(24)

        logger.log(u"Converting IMDB Info table to Indexer Scheme...")

        # A previous interrupted upgrade may have left the temp table behind;
        # drop it so the RENAME below cannot collide.
        if self.hasTable("tmp_imdb_info"):
            logger.log(u"Removing temp imdb info tables left behind from previous updates...")
            self.connection.action("DROP TABLE tmp_imdb_info")

        # SQLite-style column rename: move the old table aside, create the new
        # layout, copy rows across (tvdb_id -> indexer_id), then drop the old copy.
        self.connection.action("ALTER TABLE imdb_info RENAME TO tmp_imdb_info")
        self.connection.action(
            "CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
        self.connection.action(
            "INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
        self.connection.action("DROP TABLE tmp_imdb_info")

        self.incDBVersion()
class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
    """Schema migration 25: rebuild the info table, renaming the
    last_tvdb column to the indexer-neutral last_indexer."""

    def test(self):
        # Considered applied once the schema version has reached 25.
        return self.checkDBVersion() >= 25

    def execute(self):
        backupDatabase(25)

        logger.log(u"Converting Info table to Indexer Scheme...")

        # Clean up a temp table left over from an earlier interrupted run
        # so the RENAME below cannot collide.
        if self.hasTable("tmp_info"):
            logger.log(u"Removing temp info tables left behind from previous updates...")
            self.connection.action("DROP TABLE tmp_info")

        # Rename-aside / recreate / copy / drop dance, since SQLite cannot
        # rename a column in place.
        self.connection.action("ALTER TABLE info RENAME TO tmp_info")
        self.connection.action(
            "CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
        self.connection.action(
            "INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
        self.connection.action("DROP TABLE tmp_info")

        self.incDBVersion()
class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
    """Schema migration 26: add the archive_firstmatch flag column
    (NUMERIC, default 0) to tv_shows."""

    def test(self):
        # Considered applied once the schema version has reached 26.
        return self.checkDBVersion() >= 26

    def execute(self):
        backupDatabase(26)

        logger.log(u"Adding column archive_firstmatch to tvshows")
        # Guarded so re-running the migration on a patched DB is a no-op.
        if not self.hasColumn("tv_shows", "archive_firstmatch"):
            self.addColumn("tv_shows", "archive_firstmatch", "NUMERIC", "0")

        self.incDBVersion()
class AddSceneNumbering(AddArchiveFirstMatchOption):
    """Schema migration 27: (re)create the scene_numbering table, which maps
    indexer season/episode numbers to scene season/episode numbers."""

    def test(self):
        # Considered applied once the schema version has reached 27.
        return self.checkDBVersion() >= 27

    def execute(self):
        backupDatabase(27)

        # Start from a clean slate: any existing table is discarded outright
        # rather than migrated.
        if self.hasTable("scene_numbering"):
            self.connection.action("DROP TABLE scene_numbering")

        self.connection.action(
            "CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")

        self.incDBVersion()
class ConvertIndexerToInteger(AddSceneNumbering):
    """Schema migration 28: replace textual indexer identifiers ('tvdb',
    'tvrage') with their numeric codes (1, 2) across all affected tables."""

    def test(self):
        # Considered applied once the schema version has reached 28.
        return self.checkDBVersion() >= 28

    def execute(self):
        backupDatabase(28)

        logger.log(u"Converting Indexer to Integer ...", logger.MESSAGE)

        # Same text->number rewrite for each table, batched into one
        # mass_action call. Order matches the original hand-written list:
        # per table, tvdb first then tvrage.
        queries = []
        for table in ("tv_shows", "tv_episodes", "scene_numbering"):
            for code, name in (("1", "tvdb"), ("2", "tvrage")):
                queries.append(
                    ["UPDATE %s SET indexer = ? WHERE LOWER(indexer) = ?" % table, [code, name]])
        self.connection.mass_action(queries)

        self.incDBVersion()
class AddRequireAndIgnoreWords(ConvertIndexerToInteger):
    """Schema migration 29: add the per-show release-filtering columns
    rls_require_words and rls_ignore_words (TEXT, default empty) to tv_shows."""

    def test(self):
        # Considered applied once the schema version has reached 29.
        return self.checkDBVersion() >= 29

    def execute(self):
        backupDatabase(29)

        # Both columns are added behind hasColumn guards so the migration is
        # idempotent on a partially patched database.
        logger.log(u"Adding column rls_require_words to tvshows")
        if not self.hasColumn("tv_shows", "rls_require_words"):
            self.addColumn("tv_shows", "rls_require_words", "TEXT", "")

        logger.log(u"Adding column rls_ignore_words to tvshows")
        if not self.hasColumn("tv_shows", "rls_ignore_words"):
            self.addColumn("tv_shows", "rls_ignore_words", "TEXT", "")

        self.incDBVersion()