mirror of
https://github.com/moparisthebest/SickRage
synced 2024-11-13 21:05:11 -05:00
ee458bd211
Show root dirs can not be set from the general config menu. Mass editing of shows now has the ability to delete root dirs as well as edit them. Daily search is no longer restricted to just 1 week of results, which now allows replacing lower quality downloads with higher quality ones if available. The RSS cache is now updated on demand for each provider when performing manual, failed, backlog, or daily searches.
156 lines
4.4 KiB
Python
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

import urllib
import datetime

import sickbeard
import generic

from sickbeard import classes, show_name_helpers, helpers

from sickbeard import exceptions, logger
from sickbeard.common import *
from sickbeard import tvcache
from lib.dateutil.parser import parse as parseDate

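# NZB provider for the Animezb anime Usenet search site.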
class Animezb(generic.NZBProvider):

    def __init__(self):

        generic.NZBProvider.__init__(self, "Animezb")

        self.supportsBacklog = False
        self.supportsAbsoluteNumbering = True
        self.anime_only = True

        self.enabled = False

        self.cache = AnimezbCache(self)

        self.url = 'https://animezb.com/'

    def isEnabled(self):
        return self.enabled

    def imageName(self):
        return 'animezb.png'

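    # Build season-level search strings from the show's scene season names.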
    def _get_season_search_strings(self, ep_obj):
        return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]

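    # Build "Show+Name+NN" style search strings from every known show name
    # and the episode's scene absolute number.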
    def _get_episode_search_strings(self, ep_obj, add_string=''):
        search_string = []
        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
            ep_string = '+'.join(
                [helpers.sanitizeSceneName(show_name).replace('.', '+'), str(ep_obj.scene_absolute_number).zfill(2)])
            search_string.append(ep_string)
        return search_string

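    # Query the Animezb RSS search endpoint and return every entry that has
    # both a title and a download URL.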
    def _doSearch(self, search_string, epcount=0, age=0):
        if self.show and not self.show.is_anime:
            logger.log(u"" + str(self.show.name) + " is not an anime, skipping ...")
            return []

        params = {
            "cat": "anime",
            "q": search_string.encode('utf-8'),
            "max": "100"
        }

        search_url = self.url + "rss?" + urllib.urlencode(params)

        logger.log(u"Search url: " + search_url, logger.DEBUG)

        data = self.cache.getRSSFeed(search_url)
        if not data:
            return []

        if 'entries' in data:

            items = data.entries
            results = []

            for curItem in items:
                (title, url) = self._get_title_and_url(curItem)

                if title and url:
                    results.append(curItem)
                else:
                    logger.log(
                        u"The data returned from " + self.name + " is incomplete, this result is unusable",
                        logger.DEBUG)

            return results

        return []

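    # Look for v2-v5 re-releases (propers) newer than the given date.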
    def findPropers(self, date=None):

        results = []

        for item in self._doSearch("v2 OR v3 OR v4 OR v5"):

            (title, url) = self._get_title_and_url(item)

            if item.has_key('published_parsed') and item['published_parsed']:
                result_date = item.published_parsed
                if result_date:
                    result_date = datetime.datetime(*result_date[0:6])
            else:
                logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
                continue

            if not date or result_date > date:
                search_result = classes.Proper(title, url, result_date, self.show)
                results.append(search_result)

        return results

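# RSS feed cache for the Animezb provider.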
class AnimezbCache(tvcache.TVCache):

    def __init__(self, provider):

        tvcache.TVCache.__init__(self, provider)

        # only poll Animezb every 20 minutes max
        self.minTime = 20

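    # Fetch the latest anime-category entries from the provider's RSS feed.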
    def _getRSSData(self):

        params = {
            "cat": "anime".encode('utf-8'),
            "max": "100".encode('utf-8')
        }

        rss_url = self.provider.url + 'rss?' + urllib.urlencode(params)

        logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)

        data = self.getRSSFeed(rss_url)

        if data and 'entries' in data:
            return data.entries
        else:
            return []


provider = Animezb()