diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index 1327737d..614e97fa 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -19,6 +19,7 @@
import datetime
import os.path
import re
+import threading
import regexes
import time
import sickbeard
@@ -26,6 +27,9 @@ import sickbeard
from sickbeard import logger, helpers, scene_numbering
from dateutil import parser
+nameparser_lock = threading.Lock()
+
+
class NameParser(object):
ALL_REGEX = 0
NORMAL_REGEX = 1
@@ -64,19 +68,19 @@ class NameParser(object):
def _compile_regexes(self, regexMode):
if regexMode <= self.ALL_REGEX:
- logger.log(u"Using ALL regexs" , logger.DEBUG)
- uncompiled_regex = regexes.sports_regexs+regexes.ep_regexes
+ logger.log(u"Using ALL regexs", logger.DEBUG)
+ uncompiled_regex = regexes.sports_regexs + regexes.ep_regexes
elif regexMode == self.NORMAL_REGEX:
- logger.log(u"Using NORMAL regexs" , logger.DEBUG)
+ logger.log(u"Using NORMAL regexs", logger.DEBUG)
uncompiled_regex = regexes.ep_regexes
elif regexMode == self.SPORTS_REGEX:
- logger.log(u"Using SPORTS regexs" , logger.DEBUG)
+ logger.log(u"Using SPORTS regexs", logger.DEBUG)
uncompiled_regex = regexes.sports_regexs
else:
- logger.log(u"This is a programing ERROR. Fallback Using NORMAL regexs" , logger.ERROR)
+ logger.log(u"This is a programming ERROR. Fallback Using NORMAL regexs", logger.ERROR)
uncompiled_regex = regexes.ep_regexes
for (cur_pattern_name, cur_pattern) in uncompiled_regex:
@@ -278,6 +282,7 @@ class NameParser(object):
name_parser_cache.add(name, final_result)
return final_result
+
class ParseResult(object):
def __init__(self,
original_name,
@@ -369,7 +374,7 @@ class ParseResult(object):
return to_return.encode('utf-8')
def convert(self):
- if self.air_by_date: return self # scene numbering does not apply to air-by-date
+ if self.air_by_date: return self # scene numbering does not apply to air-by-date
if self.season_number == None: return self # can't work without a season
if len(self.episode_numbers) == 0: return self # need at least one episode
@@ -380,7 +385,8 @@ class ParseResult(object):
new_episode_numbers = []
new_season_numbers = []
for epNo in self.episode_numbers:
- (s, e) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, self.season_number, epNo)
+ (s, e) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, self.season_number,
+ epNo)
new_episode_numbers.append(e)
new_season_numbers.append(s)
@@ -408,34 +414,34 @@ class ParseResult(object):
if self.season_number == None and len(self.episode_numbers) == 0 and self.air_date:
return True
return False
+
air_by_date = property(_is_air_by_date)
def _is_sports(self):
if self.sports_event_date:
return True
return False
+
sports = property(_is_sports)
+
class NameParserCache(object):
- #TODO: check if the fifo list can beskiped and only use one dict
- _previous_parsed_list = [] # keep a fifo list of the cached items
_previous_parsed = {}
_cache_size = 100
def add(self, name, parse_result):
- self._previous_parsed[name] = parse_result
- self._previous_parsed_list.append(name)
- while len(self._previous_parsed_list) > self._cache_size:
- time.sleep(0.01)
- del_me = self._previous_parsed_list.pop(0)
- self._previous_parsed.pop(del_me)
+ with nameparser_lock:
+ self._previous_parsed[name] = parse_result
+ _current_cache_size = len(self._previous_parsed)
+ if _current_cache_size > self._cache_size:
+ for i in range(_current_cache_size - self._cache_size):
+ del self._previous_parsed[self._previous_parsed.keys()[0]]
def get(self, name):
- if name in self._previous_parsed:
- logger.log("Using cached parse result for: " + name, logger.DEBUG)
- return self._previous_parsed[name]
- else:
- return None
+ with nameparser_lock:
+ if name in self._previous_parsed:
+ logger.log("Using cached parse result for: " + name, logger.DEBUG)
+ return self._previous_parsed[name]
name_parser_cache = NameParserCache()
diff --git a/sickbeard/network_timezones.py b/sickbeard/network_timezones.py
index f8ff1431..7e16db49 100644
--- a/sickbeard/network_timezones.py
+++ b/sickbeard/network_timezones.py
@@ -25,6 +25,7 @@ from sickbeard import encodingKludge as ek
from os.path import basename, join, isfile
import os
import re
+import time
import datetime
# regex to parse time (12/24 hour format)
@@ -169,6 +170,7 @@ def update_network_dict():
# list of sql commands to update the network_timezones table
ql = []
for cur_d, cur_t in d.iteritems():
+ time.sleep(0.01)
h_k = old_d.has_key(cur_d)
if h_k and cur_t != old_d[cur_d]:
# update old record
diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py
index 696c4e54..b16bba48 100644
--- a/sickbeard/properFinder.py
+++ b/sickbeard/properFinder.py
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
+import time
import datetime
import operator
@@ -84,6 +85,12 @@ class ProperFinder():
# if they haven't been added by a different provider than add the proper to the list
for x in curPropers:
+ time.sleep(0.01)
+ showObj = helpers.findCertainShow(sickbeard.showList, x.indexerid)
+ if not showObj:
+ logger.log(u"Unable to find the show we watch with indexerID " + str(x.indexerid), logger.ERROR)
+ continue
+
name = self._genericName(x.name)
if not name in propers:
@@ -105,12 +112,19 @@ class ProperFinder():
logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)
continue
+ if not parse_result.series_name:
+ continue
+
+ if not curProper.indexerid:
+ continue
+
if not parse_result.episode_numbers:
logger.log(
u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
logger.DEBUG)
continue
+
# populate our Proper instance
if parse_result.air_by_date or parse_result.sports:
curProper.season = -1
@@ -124,9 +138,6 @@ class ProperFinder():
# for each show in our list
for curShow in sickbeard.showList:
- if not parse_result.series_name:
- continue
-
genericName = self._genericName(parse_result.series_name)
# get the scene name masks
@@ -154,9 +165,6 @@ class ProperFinder():
if curProper.indexerid != -1:
break
- if curProper.indexerid == -1:
- continue
-
if not show_name_helpers.filterBadReleases(curProper.name):
logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it",
logger.DEBUG)
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 3a2ce71f..e6412dfe 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -17,6 +17,9 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
+import time
+import socket
+import math
import sickbeard
import generic
@@ -30,10 +33,6 @@ from sickbeard.exceptions import ex, AuthException
from lib import jsonrpclib
from datetime import datetime
-import time
-import socket
-import math
-
class BTNProvider(generic.TorrentProvider):
def __init__(self):
@@ -69,7 +68,7 @@ class BTNProvider(generic.TorrentProvider):
return True
- def _doSearch(self, search_params, show=None, age=0):
+ def _doSearch(self, search_params, epcount=0, age=0):
self._checkAuth()
@@ -322,6 +321,7 @@ class BTNCache(tvcache.TVCache):
# By now we know we've got data and no auth errors, all we need to do is put it in the database
cl = []
for item in data:
+ time.sleep(0.01)
ci = self._parseItem(item)
if ci is not None:
cl.append(ci)
diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py
index 93bb967b..9fe91e55 100644
--- a/sickbeard/providers/dtt.py
+++ b/sickbeard/providers/dtt.py
@@ -64,7 +64,7 @@ class DTTProvider(generic.TorrentProvider):
def _get_episode_search_strings(self, ep_obj, add_string=''):
return self._get_season_search_strings(ep_obj)
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
# show_id = self._dtt_show_id(self.show.name)
@@ -130,7 +130,7 @@ class DTTCache(tvcache.TVCache):
def _parseItem(self, item):
title, url = self.provider._get_title_and_url(item)
- logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
+ logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index aebf0606..21e89b19 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -106,7 +106,7 @@ class EZRSSProvider(generic.TorrentProvider):
return [params]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
params = {"mode": "rss"}
@@ -130,7 +130,7 @@ class EZRSSProvider(generic.TorrentProvider):
(title, url) = self._get_title_and_url(curItem)
if title and url:
- logger.log(u"Adding item from [" + self.name + "] RSS feed to cache: " + title, logger.DEBUG)
+ logger.log(u"RSS Feed provider: [" + self.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
results.append(curItem)
else:
logger.log(
@@ -180,7 +180,7 @@ class EZRSSCache(tvcache.TVCache):
(title, url) = self.provider._get_title_and_url(item)
if title and url:
- logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
+ logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
url = self._translateLinkURL(url)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index cac7c02e..731381d9 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -199,7 +199,7 @@ class GenericProvider:
quality = Quality.sceneQuality(title)
return quality
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
return []
def _get_season_search_strings(self, episode):
@@ -234,6 +234,7 @@ class GenericProvider:
results = {}
searchItems = {}
+ itemList = []
if manualSearch:
self.cache.updateCache()
@@ -241,8 +242,6 @@ class GenericProvider:
for epObj in episodes:
time.sleep(0.01)
- itemList = []
-
cacheResult = self.cache.searchCache(epObj, manualSearch)
if len(cacheResult):
results.update(cacheResult)
@@ -256,9 +255,9 @@ class GenericProvider:
if seasonSearch:
for curString in self._get_season_search_strings(epObj):
- itemList += self._doSearch(curString)
+ itemList += self._doSearch(curString, len(episodes))
for curString in self._get_episode_search_strings(epObj):
- itemList += self._doSearch(curString)
+ itemList += self._doSearch(curString, len(episodes))
# next episode if no search results
if not itemList:
@@ -328,7 +327,7 @@ class GenericProvider:
# make sure we want the episode
wantEp = True
for epNo in actual_episodes:
- if not show.wantEpisode(actual_season, epNo, quality):
+ if not show.wantEpisode(actual_season, epNo, quality, manualSearch):
wantEp = False
break
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 4efeba74..463e5886 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -12,7 +12,8 @@
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
-
+
+import time
import datetime
import urllib
import generic
@@ -86,7 +87,7 @@ class HDBitsProvider(generic.TorrentProvider):
return (title, url)
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
self._checkAuth()
@@ -206,6 +207,7 @@ class HDBitsCache(tvcache.TVCache):
ql = []
for item in items:
+ time.sleep(0.01)
ci = self._parseItem(item)
if ci is not None:
ql.append(ci)
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 0439f512..6ac8861d 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
+import time
import re
import traceback
import datetime
@@ -112,7 +113,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -153,7 +154,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -348,6 +349,7 @@ class HDTorrentsCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -364,7 +366,7 @@ class HDTorrentsCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + str(title), logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index c89e09c4..42d8d940 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
+import time
import re
import traceback
import datetime
@@ -92,7 +93,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -134,7 +135,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -294,6 +295,7 @@ class IPTorrentsCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -310,7 +312,7 @@ class IPTorrentsCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 8fab3768..3bd373c5 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -19,6 +19,7 @@
from __future__ import with_statement
+import time
import sys
import os
import traceback
@@ -165,7 +166,7 @@ class KATProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(allPossibleShowNames(self.show)):
@@ -211,7 +212,7 @@ class KATProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -268,7 +269,7 @@ class KATProvider(generic.TorrentProvider):
#Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent
if mode == 'Season':
- ep_number = int(len(search_params['Episode']) / len(set(allPossibleShowNames(self.show))))
+ ep_number = int(epcount / len(set(allPossibleShowNames(self.show))))
title = self._find_season_quality(title, link, ep_number)
if not title or not url:
@@ -423,6 +424,7 @@ class KATCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -439,7 +441,7 @@ class KATCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
index 9c0c28d2..c1ec212d 100644
--- a/sickbeard/providers/newzbin.py
+++ b/sickbeard/providers/newzbin.py
@@ -356,7 +356,7 @@ class NewzbinCache(tvcache.TVCache):
logger.log("Found quality " + str(quality), logger.DEBUG)
- logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
+ logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
self._addCacheEntry(title, url, quality=quality)
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index cf59ee03..edeb20b8 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -209,7 +209,7 @@ class NewznabProvider(generic.NZBProvider):
(title, url) = self._get_title_and_url(curItem)
if title and url:
- logger.log(u"Adding item from [" + self.name + "] RSS feed to cache: " + title,logger.DEBUG)
+ logger.log(u"RSS Feed provider: [" + self.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
results.append(curItem)
else:
logger.log(
diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py
index 98457307..5976ebe8 100644
--- a/sickbeard/providers/nextgen.py
+++ b/sickbeard/providers/nextgen.py
@@ -133,7 +133,7 @@ class NextGenProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -175,7 +175,7 @@ class NextGenProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -344,6 +344,7 @@ class NextGenCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -360,7 +361,7 @@ class NextGenCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index 0d02a4f9..9828f151 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -18,14 +18,15 @@
import urllib
import re
+
import sickbeard
import generic
from sickbeard import show_name_helpers
from sickbeard import logger
from sickbeard.common import Quality
-from sickbeard.name_parser.parser import NameParser, InvalidNameException
from sickbeard import tvcache
+from sickbeard import show_name_helpers
REMOTE_DBG = False
@@ -143,7 +144,7 @@ class NyaaCache(tvcache.TVCache):
logger.ERROR)
return None
- logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
+ logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py
index 14d7dec2..440fd2ea 100644
--- a/sickbeard/providers/publichd.py
+++ b/sickbeard/providers/publichd.py
@@ -18,6 +18,7 @@
from __future__ import with_statement
+import time
import sys
import os
import traceback
@@ -73,7 +74,7 @@ class PublicHDProvider(generic.TorrentProvider):
return quality
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(allPossibleShowNames(self.show)):
@@ -120,7 +121,7 @@ class PublicHDProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -315,6 +316,7 @@ class PublicHDCache(tvcache.TVCache):
ql = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -330,7 +332,7 @@ class PublicHDCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index 47d9693a..e79aef37 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -164,5 +164,5 @@ class TorrentRssCache(tvcache.TVCache):
logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR)
return None
- logger.log(u"Adding item from [" + self.provider.name + "] RSS feed to cache: " + title, logger.DEBUG)
+ logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
return self._addCacheEntry(title, url)
\ No newline at end of file
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index b290c72d..d0d03dc2 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -17,6 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
+import time
import re
import traceback
import datetime
@@ -101,7 +102,7 @@ class SCCProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -142,7 +143,7 @@ class SCCProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -321,6 +322,7 @@ class SCCCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -337,7 +339,7 @@ class SCCCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index 5a505c49..e27c9d4c 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -19,6 +19,7 @@
import re
import datetime
import urlparse
+import time
import sickbeard
import generic
@@ -36,7 +37,6 @@ from lib import requests
from lib.requests import exceptions
from sickbeard.helpers import sanitizeSceneName
-
class SpeedCDProvider(generic.TorrentProvider):
urls = {'base_url': 'http://speed.cd/',
@@ -93,7 +93,7 @@ class SpeedCDProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
#If Every episode in Season is a wanted Episode then search for Season first
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name +' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -133,7 +133,7 @@ class SpeedCDProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -274,6 +274,7 @@ class SpeedCDCache(tvcache.TVCache):
ql = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -289,7 +290,7 @@ class SpeedCDCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 555553c3..42fe320e 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -18,6 +18,7 @@
from __future__ import with_statement
+import time
import re
import urllib, urllib2, urlparse
import sys
@@ -171,7 +172,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(allPossibleShowNames(self.show)) if not (ep_obj.show.air_by_date or ep_obj.show.sports) else []:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -218,7 +219,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -262,7 +263,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
#Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent
if mode == 'Season':
- ep_number = int(len(search_params['Episode']) / len(set(allPossibleShowNames(self.show))))
+ ep_number = int(epcount / len(set(allPossibleShowNames(self.show))))
title = self._find_season_quality(title, id, ep_number)
if not title or not url:
@@ -414,6 +415,7 @@ class ThePirateBayCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -430,7 +432,7 @@ class ThePirateBayCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 02684abc..654b8ac5 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -15,6 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see .
+import time
import json
import re
import traceback
@@ -119,7 +120,7 @@ class TorrentDayProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -160,7 +161,7 @@ class TorrentDayProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -295,6 +296,7 @@ class TorrentDayCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -311,7 +313,7 @@ class TorrentDayCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index 79fce736..8a489950 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -20,6 +20,7 @@ import re
import traceback
import datetime
import urlparse
+import time
import sickbeard
import generic
from sickbeard.common import Quality
@@ -96,7 +97,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
- search_string = {'Season': [], 'Episode': []}
+ search_string = {'Season': []}
if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
@@ -137,7 +138,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
return [search_string]
- def _doSearch(self, search_params, show=None, age=None):
+ def _doSearch(self, search_params, epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
@@ -294,6 +295,7 @@ class TorrentLeechCache(tvcache.TVCache):
cl = []
for result in rss_results:
+ time.sleep(0.01)
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
@@ -310,7 +312,7 @@ class TorrentLeechCache(tvcache.TVCache):
if not title or not url:
return None
- logger.log(u"Adding item to cache: " + title, logger.DEBUG)
+ logger.log(u"Attempting to cache item: " + str(title), logger.DEBUG)
return self._addCacheEntry(title, url)
diff --git a/sickbeard/scene_numbering.py b/sickbeard/scene_numbering.py
index ebe5b154..94ead7e2 100644
--- a/sickbeard/scene_numbering.py
+++ b/sickbeard/scene_numbering.py
@@ -273,25 +273,27 @@ def _xem_refresh(indexer_id, indexer):
return None
result = data
+ ql = []
+
+ cacheDB = db.DBConnection('cache.db')
if result:
- cacheDB = db.DBConnection('cache.db')
- cacheDB.action("INSERT OR REPLACE INTO xem_refresh (indexer, indexer_id, last_refreshed) VALUES (?,?,?)",
- [indexer, indexer_id, time.time()])
+ ql.append(["INSERT OR REPLACE INTO xem_refresh (indexer, indexer_id, last_refreshed) VALUES (?,?,?)",
+ [indexer, indexer_id, time.time()]])
if 'success' in result['result']:
- cacheDB.action("DELETE FROM xem_numbering where indexer = ? and indexer_id = ?", [indexer, indexer_id])
+ ql.append(["DELETE FROM xem_numbering where indexer = ? and indexer_id = ?", [indexer, indexer_id]])
for entry in result['data']:
if 'scene' in entry:
- cacheDB.action(
+ ql.append([
"INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)",
[indexer, indexer_id, entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'],
- entry['scene']['season'], entry['scene']['episode']])
+ entry['scene']['season'], entry['scene']['episode']]])
if 'scene_2' in entry: # for doubles
- cacheDB.action(
+ ql.append([
"INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)",
[indexer, indexer_id, entry[sickbeard.indexerApi(indexer).config['xem_origin']]['season'],
entry[sickbeard.indexerApi(indexer).config['xem_origin']]['episode'],
- entry['scene_2']['season'], entry['scene_2']['episode']])
+ entry['scene_2']['season'], entry['scene_2']['episode']]])
else:
logger.log(u'Failed to get XEM scene data for show %s from %s because "%s"' % (
indexer_id, sickbeard.indexerApi(indexer).name, result['message']), logger.DEBUG)
@@ -304,6 +306,8 @@ def _xem_refresh(indexer_id, indexer):
logger.log(traceback.format_exc(), logger.DEBUG)
return None
+ if ql:
+ cacheDB.mass_action(ql)
def get_xem_numbering_for_show(indexer_id, indexer):
"""
@@ -392,4 +396,5 @@ def fix_scene_numbering():
ql.append(
["UPDATE tv_episodes SET scene_episode = ? WHERE indexerid = ?", [scene_episode, epResult["indexerid"]]])
+ if ql:
myDB.mass_action(ql)
diff --git a/sickbeard/search.py b/sickbeard/search.py
index 8c91d99b..9476bb11 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -350,10 +350,11 @@ def filterSearchResults(show, results):
lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show),
results[curEp])
- if curEp in foundResults:
- foundResults[curEp] += results[curEp]
- else:
- foundResults[curEp] = results[curEp]
+ if len(results[curEp]):
+ if curEp in foundResults:
+ foundResults[curEp] += results[curEp]
+ else:
+ foundResults[curEp] = results[curEp]
return foundResults
@@ -362,6 +363,7 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season))
foundResults = {}
+ finalResults = []
if manualSearch:
curProvider.cache.updateCache()
@@ -373,25 +375,22 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
curResults = curProvider.findSearchResults(show, season, episodes, seasonSearch, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
- return
+ return []
except Exception, e:
logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
- return
+ return []
- # finished searching this provider successfully
- didSearch = True
+ if not len(curResults):
+ return []
curResults = filterSearchResults(show, curResults)
if len(curResults):
foundResults.update(curResults)
logger.log(u"Provider search results: " + str(foundResults), logger.DEBUG)
- if not didSearch:
- logger.log(u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
- logger.ERROR)
-
- finalResults = []
+ if not len(foundResults):
+ return []
anyQualities, bestQualities = Quality.splitQuality(show.quality)
@@ -401,8 +400,8 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
bestSeasonNZB = pickBestResult(foundResults[SEASON_RESULT], show, anyQualities + bestQualities)
highest_quality_overall = 0
- for cur_season in foundResults:
- for cur_result in foundResults[cur_season]:
+ for cur_episode in foundResults:
+ for cur_result in foundResults[cur_episode]:
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG)
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 3bbf239c..84982807 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -36,7 +36,6 @@ RSS_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 30
-
class SearchQueue(generic_queue.GenericQueue):
def __init__(self):
generic_queue.GenericQueue.__init__(self)
@@ -87,30 +86,44 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, ep_obj):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
-
self.ep_obj = ep_obj
-
self.success = None
def execute(self):
generic_queue.QueueItem.execute(self)
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = list(executor.map(self.process, [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]))
+
+ foundResults = []
+ didSearch = False
+
+ providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+
+ try:
+ with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ foundResults = list(
+ executor.map(self.process, providers))
+ didSearch = True
+ except Exception, e:
+ pass
+
+ if not didSearch:
+ logger.log(
+                u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
+ logger.ERROR)
result = False
- if not foundResults:
+ if not len(foundResults):
if self.ep_obj.show.air_by_date:
ui.notifications.message('No downloads were found ...',
"Couldn't find a download for %s" % self.ep_obj.prettyABName())
logger.log(u"Unable to find a download for " + self.ep_obj.prettyABDName())
else:
ui.notifications.message('No downloads were found ...',
- "Couldn't find a download for %s" % self.ep_obj.prettyName())
+ "Couldn't find a download for %s" % self.ep_obj.prettyName())
logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
self.success = result
else:
- for foundResult in foundResults:
+ for foundResult in [item for sublist in foundResults for item in sublist]:
time.sleep(0.01)
# just use the first result for now
@@ -135,7 +148,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
else:
logger.log("Beginning manual search for " + self.ep_obj.prettyName())
- return search.searchProviders(self.ep_obj.show, self.ep_obj.season, self.ep_obj, curProvider, True, False)
+ return search.searchProviders(self.ep_obj.show, self.ep_obj.season, [self.ep_obj], curProvider, False, True)
def finish(self):
# don't let this linger if something goes wrong
@@ -150,14 +163,32 @@ class RSSSearchQueueItem(generic_queue.QueueItem):
def execute(self):
generic_queue.QueueItem.execute(self)
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = list(executor.map(self.process, [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]))
- for curResult in foundResults:
- time.sleep(0.01)
+ foundResults = []
+ didSearch = False
- if curResult:
+ providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+
+ try:
+ with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ foundResults = list(
+ executor.map(self.process, providers))
+
+ didSearch = True
+ except:
+ pass
+
+ if not didSearch:
+ logger.log(
+ u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
+ logger.ERROR)
+
+ if len(foundResults):
+ for curResult in [item for sublist in foundResults for item in sublist]:
+ time.sleep(0.01)
search.snatchEpisode(curResult)
+ else:
+ logger.log(u"RSS Feed search found nothing to snatch ...")
generic_queue.QueueItem.finish(self)
@@ -236,14 +267,31 @@ class BacklogQueueItem(generic_queue.QueueItem):
def execute(self):
generic_queue.QueueItem.execute(self)
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = sum(list(executor.map(self.process, [x for x in sickbeard.providers.sortedProviderList() if x.isActive()])))
- for curResult in foundResults if foundResults else logger.log(
- u"Backlog search found nothing to snatch ..."):
- time.sleep(0.01)
+ foundResults = []
+ didSearch = False
- search.snatchEpisode(curResult)
+ providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+
+ try:
+ with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ foundResults = list(executor.map(self.process,providers))
+ didSearch = True
+ except:
+ pass
+
+ if not didSearch:
+ logger.log(
+ u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
+ logger.ERROR)
+
+ if len(foundResults):
+ for curResult in [item for sublist in foundResults for item in sublist]:
+ time.sleep(0.01)
+
+ search.snatchEpisode(curResult)
+ else:
+ logger.log(u"Backlog search found nothing to snatch ...")
self.finish()
@@ -254,7 +302,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
if len(seasonEps) == len(self.wantedEpisodes):
seasonSearch = True
- return search.searchProviders(self.show, self.segment, self.wantedEpisodes, curProvider, False, seasonSearch)
+ return search.searchProviders(self.show, self.segment, self.wantedEpisodes, curProvider, seasonSearch, False)
def _need_any_episodes(self, statusResults, bestQualities):
wantedEpisodes = []
@@ -294,14 +342,32 @@ class FailedQueueItem(generic_queue.QueueItem):
def execute(self):
generic_queue.QueueItem.execute(self)
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = list(executor.map(self.process, [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]))
- # download whatever we find
- for curResult in foundResults:
- time.sleep(0.01)
+ foundResults = []
+ didSearch = False
- self.success = search.snatchEpisode(curResult)
+ providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+
+ try:
+ with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ foundResults = list(
+ executor.map(self.process, providers))
+ didSearch = True
+ except:
+ pass
+
+ if not didSearch:
+ logger.log(
+ u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
+ logger.ERROR)
+
+ if len(foundResults):
+ for curResult in [item for sublist in foundResults for item in sublist]:
+ time.sleep(0.01)
+
+ self.success = search.snatchEpisode(curResult)
+ else:
+ logger.log(u"Retry failed download search found nothing to snatch ...")
self.finish()
@@ -315,7 +381,7 @@ class FailedQueueItem(generic_queue.QueueItem):
logger.log("Beginning manual search for " + epObj.prettyABDName())
else:
logger.log(
- "Beginning failed download search for " + epObj.prettyName())
+ "Beginning failed download search for " + epObj.prettyName())
(release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
if release:
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index caddb100..e94ba7b2 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -440,6 +440,7 @@ class TVShow(object):
sql_l = []
for season in showObj:
+ time.sleep(0.01)
scannedEps[season] = {}
for episode in showObj[season]:
# need some examples of wtf episode 0 means to decide if we want it or not
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 59c5d58f..cd3f61ac 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -143,6 +143,7 @@ class TVCache():
items = data.entries
ql = []
for item in items:
+ time.sleep(0.01)
qi = self._parseItem(item)
if qi is not None:
ql.append(qi)
@@ -224,7 +225,7 @@ class TVCache():
if cacheResult:
logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "}",
logger.DEBUG)
- return
+ return None
# if we don't have complete info then parse the filename to get it
try:
@@ -244,7 +245,7 @@ class TVCache():
showObj = sickbeard.name_cache.retrieveShowFromCache(parse_result.series_name)
if not showObj:
- logger.log(u"Cache lookup failed for [" + parse_result.series_name + "], skipping ...", logger.DEBUG)
+ logger.log(u"Show is not in our list of watched shows [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
return None
season = episodes = None
@@ -296,8 +297,7 @@ class TVCache():
if date != None:
sql += " AND time >= " + str(int(time.mktime(date.timetuple())))
- #return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))
- return myDB.select(sql)
+ return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))
def findNeededEpisodes(self, epObj=None, manualSearch=False):
neededEps = {}
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index d5bea869..2f1001d8 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -3153,6 +3153,7 @@ class Home:
sql_l = []
for curEp in eps.split('|'):
+ time.sleep(0.01)
logger.log(u"Attempting to set status on episode " + curEp + " to " + status, logger.DEBUG)