# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import os
import shutil
import stat
import sickbeard
from sickbeard import postProcessor
from sickbeard import db, helpers, exceptions
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard import logger
from sickbeard.name_parser.parser import NameParser, InvalidNameException
from sickbeard import common
from sickbeard import failedProcessor
from lib.unrar2 import RarFile, RarInfo
from lib.unrar2.rar_exceptions import *
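
# Module-level state shared by the functions below: process_result tracks whether the
# current post-processing run succeeded and returnStr accumulates the log output that
# processDir() returns to its caller.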


def logHelper(logMessage, logLevel=logger.MESSAGE):
    logger.log(logMessage, logLevel)
    return logMessage + u"\n"


def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto"):
    """
    Scans through the files in dirName and processes whatever media files it finds

    dirName: The folder name to look in
    nzbName: The NZB name which resulted in this folder being downloaded
    process_method: How to process the files (copy, move, hardlink or symlink); defaults to the configured PROCESS_METHOD
    force: True to postprocess already postprocessed files
    is_priority: Whether the download is flagged as a priority download (passed through to the PostProcessor)
    failed: Boolean for whether or not the download failed
    type: Type of postprocessing, auto or manual
    """
    global process_result, returnStr

    returnStr = ''

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)
    returnStr += logHelper(u"TV_DOWNLOAD_DIR: " + sickbeard.TV_DOWNLOAD_DIR, logger.DEBUG)
    # if they passed us a real dir then assume it's the one we want
    if ek.ek(os.path.isdir, dirName):
        dirName = ek.ek(os.path.realpath, dirName)

    # if the client and Sick Beard are not on the same machine, translate the dir into a network dir
    elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \
            and ek.ek(os.path.normpath, dirName) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
        dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1])
        returnStr += logHelper(u"Trying to use folder " + dirName, logger.DEBUG)

    # if we didn't find a real dir then quit
    if not ek.ek(os.path.isdir, dirName):
        returnStr += logHelper(
            u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.",
            logger.DEBUG)
        return returnStr
    path, dirs, files = get_path_dir_files(dirName, nzbName, type)

    returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
    returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)

    rarFiles = filter(helpers.isRarFile, files)
    rarContent = unRAR(path, rarFiles, force)
    files += rarContent
    videoFiles = filter(helpers.isMediaFile, files)
    videoInRar = filter(helpers.isMediaFile, rarContent)

    returnStr += logHelper(u"PostProcessing Files: " + str(files), logger.DEBUG)
    returnStr += logHelper(u"PostProcessing VideoFiles: " + str(videoFiles), logger.DEBUG)
    returnStr += logHelper(u"PostProcessing RarContent: " + str(rarContent), logger.DEBUG)
    returnStr += logHelper(u"PostProcessing VideoInRar: " + str(videoInRar), logger.DEBUG)

    # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten).
    nzbNameOriginal = nzbName
    if len(videoFiles) >= 2:
        nzbName = None

    if not process_method:
        process_method = sickbeard.PROCESS_METHOD

    process_result = True

    #Don't link media when the media is extracted from a rar in the same path
    if process_method in ('hardlink', 'symlink') and videoInRar:
        process_media(path, videoInRar, nzbName, 'move', force, is_priority)
        delete_files(path, rarContent)
        for video in set(videoFiles) - set(videoInRar):
            process_media(path, [video], nzbName, process_method, force, is_priority)
    else:
        for video in videoFiles:
            process_media(path, [video], nzbName, process_method, force, is_priority)
    #Process video files in all TV subdirs
    for dir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed)]:

        process_result = True

        for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False):

            rarFiles = filter(helpers.isRarFile, fileList)
            rarContent = unRAR(processPath, rarFiles, force)
            fileList = set(fileList + rarContent)
            videoFiles = filter(helpers.isMediaFile, fileList)
            videoInRar = filter(helpers.isMediaFile, rarContent)
            notwantedFiles = [x for x in fileList if x not in videoFiles]

            #Don't link media when the media is extracted from a rar in the same path
            if process_method in ('hardlink', 'symlink') and videoInRar:
                process_media(processPath, videoInRar, nzbName, 'move', force, is_priority)
                process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
                              is_priority)
                delete_files(processPath, rarContent)
            else:
                process_media(processPath, videoFiles, nzbName, process_method, force, is_priority)

                #Delete all files that are not needed
                if process_method != "move" or not process_result \
                        or type == "manual":  #Avoid deleting files when this is a manual post-processing run
                    continue

                delete_files(processPath, notwantedFiles)

                if process_method == "move" and \
                        ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath,
                                                                      sickbeard.TV_DOWNLOAD_DIR):
                    if processPath != sickbeard.TV_DOWNLOAD_DIR:
                        delete_dir(processPath)

    return returnStr


def validateDir(path, dirName, nzbNameOriginal, failed):
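    """
    Check whether a sub-directory of the download dir looks like it should be processed:
    flags _FAILED_ and _UNDERSIZED_ dirs as failed, refuses dirs that are already inside a
    show's location, and returns True only if a parsable video, dir or (when unpacking is
    enabled) rar file name is found.
    """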
    global process_result, returnStr

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    if ek.ek(os.path.basename, dirName).startswith('_FAILED_'):
        returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
        returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.",
                               logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).startswith('_UNPACK_'):
        returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.",
                               logger.DEBUG)

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal)
        return False
    # make sure the dir isn't inside a show dir
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")
    for sqlShow in sqlResults:
        if dirName.lower().startswith(
                ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(
                os.path.realpath, sqlShow["location"]).lower():
            returnStr += logHelper(
                u"You're trying to post process an episode that's already been moved to its show dir, skipping",
                logger.ERROR)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dirName), topdown=False):
        allDirs += processDir
        allFiles += fileList

    videoFiles = filter(helpers.isMediaFile, allFiles)
    allDirs.append(dirName)
    #check if the dir has at least one tv video file
    for video in videoFiles:
        try:
            NameParser().parse(video)
            return True
        except InvalidNameException:
            pass

    for dir in allDirs:
        try:
            NameParser().parse(dir)
            return True
        except InvalidNameException:
            pass

    if sickbeard.UNPACK:
        #Search for packed releases
        packedFiles = filter(helpers.isRarFile, allFiles)

        for packed in packedFiles:
            try:
                NameParser().parse(packed)
                return True
            except InvalidNameException:
                pass

    return False


def unRAR(path, rarFiles, force):
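    """
    Extract the given rar archives into path when unpacking is enabled, skipping any archive
    whose contents have already been post-processed, and return the list of unpacked file names.
    """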
    global process_result, returnStr

    unpacked_files = []

    if sickbeard.UNPACK and rarFiles:

        returnStr += logHelper(u"Packed Releases detected: " + str(rarFiles), logger.DEBUG)

        for archive in rarFiles:

            returnStr += logHelper(u"Unpacking archive: " + archive, logger.DEBUG)

            try:
                rar_handle = RarFile(os.path.join(path, archive))

                # Skip extraction if any file in archive has previously been extracted
                skip_file = False
                for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]:
                    if already_postprocessed(path, file_in_archive, force):
                        returnStr += logHelper(
                            u"Archive file already post-processed, extraction skipped: " + file_in_archive,
                            logger.DEBUG)
                        skip_file = True
                        break

                if skip_file:
                    continue

                rar_handle.extract(path=path, withSubpath=False, overwrite=False)
                unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
                del rar_handle
            except Exception, e:
                returnStr += logHelper(u"Failed Unrar archive " + archive + ': ' + ex(e), logger.ERROR)
                process_result = False
                continue

        returnStr += logHelper(u"UnRar content: " + str(unpacked_files), logger.DEBUG)

    return unpacked_files


def already_postprocessed(dirName, videofile, force):
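    """
    Return True if the dir or video file has already been post-processed (matched against
    release names in the DB, or against the history of episodes already marked DOWNLOADED);
    always returns False when force is set.
    """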
    global returnStr

    if force:
        return False

    #Needed for accessing the DB with a unicode dirName
    if not isinstance(dirName, unicode):
        dirName = unicode(dirName, 'utf_8')

    # Avoid processing the same dir again if we use a process method <> move
    myDB = db.DBConnection()
    sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
    if sqlResult:
        returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
                               logger.DEBUG)
        return True

    # This is needed for videos whose name differs from dirName
    if not isinstance(videofile, unicode):
        videofile = unicode(videofile, 'utf_8')

    sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
    if sqlResult:
        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
                               logger.DEBUG)
        return True

    #Needed if we have downloaded the same episode at a different quality
    search_sql = "SELECT tv_episodes.indexerid, history.resource FROM tv_episodes INNER JOIN history ON history.showid=tv_episodes.showid"
    search_sql += " WHERE history.season=tv_episodes.season and history.episode=tv_episodes.episode"
    search_sql += " and tv_episodes.status IN (" + ",".join([str(x) for x in common.Quality.DOWNLOADED]) + ")"
    search_sql += " and history.resource LIKE ?"
    sqlResult = myDB.select(search_sql, [u'%' + videofile])
    if sqlResult:
        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
                               logger.DEBUG)
        return True

    return False


def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority):
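    """
    Run the PostProcessor over each video file in processPath, appending its log to returnStr
    and recording the outcome in process_result; stops at the first failure.
    """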
    global process_result, returnStr

    for cur_video_file in videoFiles:

        if already_postprocessed(processPath, cur_video_file, force):
            continue

        cur_video_file_path = ek.ek(os.path.join, processPath, cur_video_file)

        try:
            processor = postProcessor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority)
            process_result = processor.process()
            process_fail_message = ""
        except exceptions.PostProcessingFailed, e:
            process_result = False
            process_fail_message = ex(e)

        returnStr += processor.log

        if process_result:
            returnStr += logHelper(u"Processing succeeded for " + cur_video_file_path)
        else:
            returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message,
                                   logger.WARNING)

        #If something fails, abort processing of the remaining files in the dir
        if not process_result:
            break


def delete_files(processPath, notwantedFiles):
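    """
    Delete the leftover (non-video) files in processPath, clearing the read-only attribute
    first where necessary; does nothing if the previous processing step failed.
    """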
    global returnStr, process_result

    if not process_result:
        return

    #Delete all files that are not needed
    for cur_file in notwantedFiles:

        cur_file_path = ek.ek(os.path.join, processPath, cur_file)

        if not ek.ek(os.path.isfile, cur_file_path):
            continue  #Prevent errors when a notwanted file is an associated file

        returnStr += logHelper(u"Deleting file " + cur_file, logger.DEBUG)

        #check the read-only attribute first
        file_attribute = ek.ek(os.stat, cur_file_path)[0]
        if not file_attribute & stat.S_IWRITE:
            # File is read-only, so make it writeable
            returnStr += logHelper(u"Changing ReadOnly Flag for file " + cur_file, logger.DEBUG)
            try:
                ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
            except OSError, e:
                returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + str(e.strerror),
                                       logger.DEBUG)

        try:
            ek.ek(os.remove, cur_file_path)
        except OSError, e:
            returnStr += logHelper(u"Unable to delete file " + cur_file + ': ' + str(e.strerror), logger.DEBUG)


def delete_dir(processPath):
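    """
    Remove processPath, but only if it is already empty.
    """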
    global returnStr

    if not ek.ek(os.listdir, processPath) == []:
        returnStr += logHelper(
            u"Skipping deleting folder " + processPath + ' because some files were not deleted/processed',
            logger.DEBUG)
        return

    returnStr += logHelper(u"Deleting folder " + processPath, logger.DEBUG)

    try:
        shutil.rmtree(processPath)
    except (OSError, IOError), e:
        returnStr += logHelper(u"Warning: unable to remove the folder " + processPath + ": " + ex(e), logger.WARNING)


def get_path_dir_files(dirName, nzbName, type):
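    """
    Work out the base path, the list of sub-dirs and the list of files to process, depending on
    whether this is a scheduled/manual run over the TV download dir or a script-driven run on a
    single release dir (or a single file).
    """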
    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type == "manual":  #Scheduled or manual post-processing
        #First get all the subdirs in dirName
        for path, dirs, files in ek.ek(os.walk, dirName):
            break
    else:
        path, dirs = ek.ek(os.path.split, dirName)  #Script post-processing
        if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(
                os.path.join(dirName, nzbName)):  #For a single torrent file without a dir
            dirs = []
            files = [os.path.join(dirName, nzbName)]
        else:
            dirs = [dirs]
            files = []

    return path, dirs, files


def process_failed(dirName, nzbName):
    """Process a download that did not complete correctly"""
    global returnStr

    if sickbeard.USE_FAILED_DOWNLOADS:
        try:
            processor = failedProcessor.FailedProcessor(dirName, nzbName)
            process_result = processor.process()
            process_fail_message = ""
        except exceptions.FailedProcessingFailed, e:
            process_result = False
            process_fail_message = ex(e)

        returnStr += processor.log

        if sickbeard.DELETE_FAILED and process_result:
            delete_dir(dirName)

        if process_result:
            returnStr += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
        else:
            returnStr += logHelper(
                u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
                logger.WARNING)