From 4a294764152c3878927095f5c7efe955489ec7f9 Mon Sep 17 00:00:00 2001
From: echel0n
Date: Mon, 21 Jul 2014 15:16:04 -0700
Subject: [PATCH] Added code to dispose of the bs4 parse tree when finished and
 to perform garbage collection afterwards; this fixes a memory leak SR was
 experiencing during backlog/manual/failed searches.

---
 sickbeard/providers/bitsoup.py      |  7 ++++++-
 sickbeard/providers/freshontv.py    |  6 ++++++
 sickbeard/providers/hdtorrents.py   |  5 +++++
 sickbeard/providers/iptorrents.py   |  5 +++++
 sickbeard/providers/kat.py          | 20 +++++++++++++------
 sickbeard/providers/publichd.py     |  9 +++++++--
 sickbeard/providers/scc.py          |  9 +++++++++
 sickbeard/providers/torrentbytes.py |  5 +++++
 sickbeard/providers/torrentleech.py |  5 +++++
 sickbeard/scene_numbering.py        |  1 -
 sickbeard/tv.py                     | 31 +++++++++++++----------------
 tests/torrent_tests.py              |  9 +++++++--
 12 files changed, 83 insertions(+), 29 deletions(-)

diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
index d66facd0..6e19257e 100644
--- a/sickbeard/providers/bitsoup.py
+++ b/sickbeard/providers/bitsoup.py
@@ -21,6 +21,7 @@ import traceback
 import datetime
 import urlparse
 import time
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets
@@ -174,7 +175,11 @@ class BitSoupProvider(generic.TorrentProvider):
 
                     torrent_table = html.find('table', attrs={'class': 'koptekst'})
                     torrent_rows = torrent_table.find_all('tr') if torrent_table else []
-                    
+
+                    # cleanup memory
+                    html.decompose()
+                    gc.collect()
+
                     #Continue only if one Release is found
                     if len(torrent_rows) < 2:
                         logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index b52b8f92..e034c95b 100755
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -21,6 +21,7 @@ import traceback
 import datetime
 import urlparse
 import time
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets
@@ -199,6 +200,11 @@ class FreshOnTVProvider(generic.TorrentProvider):
 
                     torrent_table = html.find('table', attrs={'class': 'frame'})
                     torrent_rows = torrent_table.findChildren('tr') if torrent_table else []
+
+                    # cleanup memory
+                    html.decompose()
+                    gc.collect()
+
                     #Continue only if one Release is found
                     if len(torrent_rows) < 2:
                         logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index f176e542..79bb8374 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -22,6 +22,7 @@ import re
 import traceback
 import datetime
 import urlparse
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets
@@ -203,6 +204,10 @@ class HDTorrentsProvider(generic.TorrentProvider):
 
                     #Get first entry in table
                     entries = html.find_all('td', attrs={'align': 'center'})
+                    # cleanup memory
+                    html.decompose()
+                    gc.collect()
+
                     if not entries:
                         logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
                                    logger.DEBUG)
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 3e2caf15..9dccd217 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -21,6 +21,7 @@ import re
 import traceback
 import datetime
 import urlparse
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets
@@ -182,6 +183,10 @@ class IPTorrentsProvider(generic.TorrentProvider):
 
                     torrent_table = html.find('table', attrs={'class': 'torrents'})
                     torrents = torrent_table.find_all('tr') if torrent_table else []
+                    # cleanup memory
+                    html.decompose()
+                    gc.collect()
+
                     #Continue only if one Release is found
                     if len(torrents) < 2:
                         logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 2ecf7c34..50c7b6b1 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -19,7 +19,7 @@
 
 from __future__ import with_statement
 
-import time
+import gc
 import sys
 import os
 import traceback
@@ -120,8 +120,12 @@ class KATProvider(generic.TorrentProvider):
             return None
 
         try:
-            soup = BeautifulSoup(data, features=["html5lib", "permissive"])
-            file_table = soup.find('table', attrs={'class': 'torrentFileList'})
+            html = BeautifulSoup(data, features=["html5lib", "permissive"])
+            file_table = html.find('table', attrs={'class': 'torrentFileList'})
+
+            # cleanup memory
+            html.decompose()
+            gc.collect()
 
             if not file_table:
                 return None
@@ -248,11 +252,15 @@ class KATProvider(generic.TorrentProvider):
                     continue
                 try:
-                    soup = BeautifulSoup(html, features=["html5lib", "permissive"])
+                    html = BeautifulSoup(html, features=["html5lib", "permissive"])
 
-                    torrent_table = soup.find('table', attrs={'class': 'data'})
+                    torrent_table = html.find('table', attrs={'class': 'data'})
                     torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+                    # cleanup memory
+                    html.decompose()
+                    gc.collect()
+
                     #Continue only if one Release is found
                     if len(torrent_rows) < 2:
                         logger.log(u"The data returned from " + self.name + " does not contain any torrents",
                                    logger.DEBUG)
                         continue
@@ -284,7 +292,7 @@ class KATProvider(generic.TorrentProvider):
                                 logger.DEBUG)
                             continue
 
-                        #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent 
+                        #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent
                         if mode == 'Season' and search_mode == 'sponly':
                             ep_number = int(epcount / len(set(allPossibleShowNames(self.show))))
                             title = self._find_season_quality(title, link, ep_number)
diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py
index ff365d6c..fddf0fe4 100644
--- a/sickbeard/providers/publichd.py
+++ b/sickbeard/providers/publichd.py
@@ -25,6 +25,7 @@ import traceback
 import urllib, urlparse
 import re
 import datetime
+import gc
 
 import sickbeard
 import generic
@@ -152,11 +153,15 @@ class PublicHDProvider(generic.TorrentProvider):
                 html = os.linesep.join([s for s in html.splitlines() if not optreg.search(s)])
 
                 try:
-                    soup = BeautifulSoup(html, features=["html5lib", "permissive"])
+                    html = BeautifulSoup(html, features=["html5lib", "permissive"])
 
-                    torrent_table = soup.find('table', attrs={'id': 'torrbg'})
+                    torrent_table = html.find('table', attrs={'id': 'torrbg'})
                     torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+                    # cleanup memory
+                    html.decompose()
+                    gc.collect()
+
                     #Continue only if one Release is found
                     if len(torrent_rows) < 2:
                         logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 2f908245..1d5d3753 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -22,6 +22,7 @@ import re
 import traceback
 import datetime
 import urlparse
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets
@@ -203,6 +204,10 @@ class SCCProvider(generic.TorrentProvider):
 
                     torrent_table = html.find('table', attrs={'id': 'torrents-table'})
                     torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+                    # cleanup memory
+                    html.decompose()
+                    gc.collect()
+
                     #Continue only if at least one Release is found
                     if len(torrent_rows) < 2:
                         if html.title:
@@ -222,10 +227,14 @@
                             url = all_urls[1]
                         else:
                             url = all_urls[0]
+
                         title = link.string
                         if re.search('\.\.\.', title):
                             details_html = BeautifulSoup(self.getURL(self.url + "/" + link['href']))
                             title = re.search('(?<=").+(?
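
A minimal standalone sketch of the cleanup pattern this patch applies in each provider (not code from SickRage): parse the page, copy out the values that are still needed, then decompose() the soup and run a collection so the parse tree and the reference cycles BeautifulSoup creates can be reclaimed. The function name, the 'torrents' table class, and the use of Python's built-in html.parser (in place of the html5lib parser the providers pass to BeautifulSoup) are illustrative assumptions.

import gc

from bs4 import BeautifulSoup


def extract_torrent_rows(page_html):
    """Return the text of each torrent row from a provider search page."""
    soup = BeautifulSoup(page_html, 'html.parser')

    torrent_table = soup.find('table', attrs={'class': 'torrents'})
    torrent_rows = torrent_table.find_all('tr') if torrent_table else []

    # Copy plain values out of the tree before tearing it down; decompose()
    # destroys every Tag, so Tag objects kept past this point are unusable.
    rows = [row.get_text(' ', strip=True) for row in torrent_rows]

    # cleanup memory
    soup.decompose()
    gc.collect()

    return rows

The order matters: anything still needed from the document has to be extracted before decompose() is called, because it dismantles the whole tree in place.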