# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

import os
import time
import datetime
import sqlite3
import urllib
import urlparse
import re

import sickbeard

from shove import Shove
from feedcache import cache

from sickbeard import db
from sickbeard import logger
from sickbeard.common import Quality
from sickbeard import helpers, show_name_helpers
from sickbeard.exceptions import MultipleShowObjectsException, ex
from sickbeard.exceptions import AuthException
from sickbeard import encodingKludge as ek

from name_parser.parser import NameParser, InvalidNameException


class CacheDBConnection(db.DBConnection):
    def __init__(self, providerName):
        db.DBConnection.__init__(self, "cache.db")

        # Create the provider's cache table if it's not already there
        try:
            sql = "CREATE TABLE [" + providerName + "] (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT);"
            self.connection.execute(sql)
            self.connection.commit()
        except sqlite3.OperationalError as e:
            if str(e) != "table [" + providerName + "] already exists":
                raise

        # Create the lastUpdate table if it's not already there
        try:
            sql = "CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);"
            self.connection.execute(sql)
            self.connection.commit()
        except sqlite3.OperationalError as e:
            if str(e) != "table lastUpdate already exists":
                raise

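# For reference, the cache.db layout created above (one table per provider
# plus a shared lastUpdate table); SQLite's [bracketed] identifiers allow
# provider IDs that wouldn't be valid bare table names:
#
#   [<providerName>]: name TEXT, season NUMERIC, episodes TEXT,
#                     indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT
#   lastUpdate:       provider TEXT, time NUMERIC
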
class TVCache():
    def __init__(self, provider):
        self.provider = provider
        self.providerID = self.provider.getID()
        self.minTime = 10

    def _getDB(self):
        return CacheDBConnection(self.providerID)

    def _clearCache(self):
        myDB = self._getDB()
        myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")

    def _getRSSData(self):
        # stub; provider subclasses override this to return their feed data
        data = None
        return data

    def _checkAuth(self, data):
        return True

    def _checkItemAuth(self, title, url):
        return True

    def getRSSFeed(self, url, post_data=None):
        # create provider storage cache
        storage = Shove('file://' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.providerID))
        fc = cache.Cache(storage)

        parsed = list(urlparse.urlparse(url))
        parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one

        if post_data:
            url = url + 'api?' + urllib.urlencode(post_data)

        f = fc.fetch(url)

        if not f:
            logger.log(u"Error loading " + self.providerID + " URL: " + url, logger.ERROR)
            return None
        elif 'error' in f.feed:
            logger.log(u"Newznab ERROR:[%s] CODE:[%s]" % (f.feed['error']['description'], f.feed['error']['code']),
                       logger.DEBUG)
            return None
        elif not f.entries:
            logger.log(u"No items found on " + self.providerID + " using URL: " + url, logger.WARNING)
            return None

        storage.close()

        return f

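    # getRSSFeed() boils down to the following (a sketch; feedcache wraps
    # feedparser and only re-fetches a URL once its cached copy goes stale):
    #
    #   storage = Shove('file:///cache/dir/providerID')
    #   feed = cache.Cache(storage).fetch('http://example.com/rss')
    #   for entry in feed.entries:
    #       print entry.title, entry.link
    #   storage.close()
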
    def updateCache(self):
        if not self.shouldUpdate():
            return

        if self._checkAuth(None):
            data = self._getRSSData()

            # as long as the http request worked we count this as an update
            if data:
                self.setLastUpdate()
            else:
                return []

            # now that we've loaded the current RSS feed let's delete the old cache
            logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
            self._clearCache()

            if self._checkAuth(data):
                items = data.entries

                ql = []
                for item in items:
                    qi = self._parseItem(item)
                    if qi is not None:
                        ql.append(qi)

                if len(ql):
                    myDB = self._getDB()
                    myDB.mass_action(ql)

            else:
                raise AuthException(
                    u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")

        return []

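    # updateCache() in short (a summary of the method above): bail out if we
    # refreshed within the last minTime minutes, fetch the feed, stamp
    # lastUpdate, wipe this provider's table, then batch-insert every row
    # _parseItem() manages to build via a single mass_action() call.
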
    def _translateTitle(self, title):
        return title.replace(' ', '.')

    def _translateLinkURL(self, url):
        # undo the HTML entity escaping that feeds apply to URLs
        return url.replace('&amp;', '&')

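    # Examples of the two translations above (values are illustrative only):
    #   _translateTitle('Show Name S01E02')           -> 'Show.Name.S01E02'
    #   _translateLinkURL('http://x/get?a=1&amp;b=2') -> 'http://x/get?a=1&b=2'
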
    def _parseItem(self, item):
        title = item.title
        url = item.link

        self._checkItemAuth(title, url)

        if title and url:
            title = self._translateTitle(title)
            url = self._translateLinkURL(url)

            logger.log(u"Checking if item from RSS feed is in the cache: " + title, logger.DEBUG)
            return self._addCacheEntry(title, url)

        else:
            logger.log(
                u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
                logger.DEBUG)
            return None

    def _getLastUpdate(self):
        myDB = self._getDB()
        sqlResults = myDB.select("SELECT time FROM lastUpdate WHERE provider = ?", [self.providerID])

        if sqlResults:
            lastTime = int(sqlResults[0]["time"])
            # a timestamp in the future is bogus, treat it as never updated
            if lastTime > int(time.mktime(datetime.datetime.today().timetuple())):
                lastTime = 0
        else:
            lastTime = 0

        return datetime.datetime.fromtimestamp(lastTime)

    def setLastUpdate(self, toDate=None):
        if not toDate:
            toDate = datetime.datetime.today()

        myDB = self._getDB()
        myDB.upsert("lastUpdate",
                    {'time': int(time.mktime(toDate.timetuple()))},
                    {'provider': self.providerID})

    lastUpdate = property(_getLastUpdate)

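    # lastUpdate round-trips through a POSIX timestamp: setLastUpdate() writes
    # int(time.mktime(dt.timetuple())) and _getLastUpdate() reads it back via
    # datetime.datetime.fromtimestamp(), collapsing future values to the epoch
    # so a corrupt row can't suppress updates indefinitely.
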
    def shouldUpdate(self):
        # if we've updated recently then skip the update
        if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
            logger.log(u"Last update was too soon, using old cache: today()-" + str(self.lastUpdate) + "<" + str(
                datetime.timedelta(minutes=self.minTime)), logger.DEBUG)
            return False

        return True

    def _addCacheEntry(self, name, url, quality=None):
        cacheResult = sickbeard.name_cache.retrieveNameFromCache(name)
        if cacheResult:
            logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "]", logger.DEBUG)
            return

        # if we don't have complete info then parse the filename to get it
        try:
            myParser = NameParser()
            parse_result = myParser.parse(name)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
            return None

        if not parse_result:
            logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
            return None

        if not parse_result.series_name:
            logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
            return None

        showObj = helpers.get_show_by_name(parse_result.series_name)
        if not showObj:
            logger.log(u"Could not find a show matching " + parse_result.series_name + " in the database, skipping ...", logger.DEBUG)
            return None

        logger.log(u"Adding [" + name + "] to the name cache for indexer id [" + str(showObj.indexerid) + "]", logger.DEBUG)
        sickbeard.name_cache.addNameToCache(name, showObj.indexerid)

        season = episodes = None
        if parse_result.air_by_date:
            myDB = db.DBConnection()

            airdate = parse_result.air_date.toordinal()
            sql_results = myDB.select(
                "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
                [showObj.indexerid, showObj.indexer, airdate])
            if len(sql_results) > 0:
                season = int(sql_results[0]["season"])
                episodes = [int(sql_results[0]["episode"])]
        else:
            season = parse_result.season_number
            episodes = parse_result.episode_numbers

        if season and episodes:
            # store episodes as a pipe-separated string
            episodeText = "|" + "|".join(map(str, episodes)) + "|"

            # get the current timestamp
            curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))

            # get quality of release
            if quality is None:
                quality = Quality.sceneQuality(name)

            if not isinstance(name, unicode):
                name = unicode(name, 'utf-8')

            logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
            return [
                "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
                [name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]

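    # The pipe-delimited episodes column built above is what lets
    # findNeededEpisodes() match a single episode with plain SQL, e.g.:
    #   episodes [1, 2] -> stored as '|1|2|'
    #   episode 2       -> matched by: episodes LIKE '%|2|%'
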
    def searchCache(self, episode, manualSearch=False):
        neededEps = self.findNeededEpisodes(episode, manualSearch)
        return neededEps

    def listPropers(self, date=None, delimiter="."):
        myDB = self._getDB()

        sql = "SELECT * FROM [" + self.providerID + "] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'"

        if date is not None:
            sql += " AND time >= " + str(int(time.mktime(date.timetuple())))

        return myDB.select(sql)

    def findNeededEpisodes(self, epObj=None, manualSearch=False):
        neededEps = {}

        myDB = self._getDB()

        if not epObj:
            sqlResults = myDB.select("SELECT * FROM [" + self.providerID + "]")
        else:
            sqlResults = myDB.select(
                "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
                [epObj.show.indexerid, epObj.scene_season, "%|" + str(epObj.scene_episode) + "|%"])

        # for each cache entry
        for curResult in sqlResults:

            # skip non-tv crap (but allow it for Newzbin because we assume it's filtered well)
            if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
                continue

            # get the show object, or if it's not one of our shows then ignore it
            try:
                showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
            except MultipleShowObjectsException:
                showObj = None

            if not showObj:
                continue

            # get season and ep data (ignoring multi-eps for now)
            curSeason = int(curResult["season"])
            if curSeason == -1:
                continue
            curEp = curResult["episodes"].split("|")[1]
            if not curEp:
                continue
            curEp = int(curEp)
            curQuality = int(curResult["quality"])

            # if the show says we want that episode then add it to the list
            if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
                logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
                           Quality.qualityStrings[curQuality], logger.DEBUG)
            else:
                if not epObj:
                    epObj = showObj.getEpisode(curSeason, curEp)

                # build a result object
                title = curResult["name"]
                url = curResult["url"]

                logger.log(u"Found result " + title + " at " + url)

                result = self.provider.getResult([epObj])
                result.url = url
                result.name = title
                result.quality = curQuality
                result.content = self.provider.getURL(url) \
                    if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
                    and not url.startswith('magnet') else None

                # add it to the list
                if epObj not in neededEps:
                    neededEps[epObj] = [result]
                else:
                    neededEps[epObj].append(result)

        return neededEps
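
# Typical usage from a provider (a sketch; `provider` is assumed to expose
# getID(), name, getResult(), getURL() and providerType, matching what
# TVCache calls above):
#
#   tv_cache = TVCache(provider)
#   tv_cache.updateCache()                       # fetch RSS, refresh cache.db
#   needed = tv_cache.findNeededEpisodes(epObj)  # {episode: [result, ...]}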