Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-04 10:23:37 +00:00
Added code to dispose of the bs4 object when finished with it and to perform garbage collection afterwards; this fixes a memory leak SR was experiencing during backlog/manual/failed searches.
parent f340f9b073
commit 4a29476415
12 changed files with 83 additions and 29 deletions
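The provider changes all follow one pattern: parse the page with BeautifulSoup, pull out the rows that are needed, then explicitly decompose() the parse tree and call gc.collect() so its reference cycles are freed immediately instead of lingering between searches. Below is a minimal sketch of that pattern, not code lifted from any one provider: the helper name, the import path and the 'koptekst' table class are illustrative, and the sketch copies plain values out of the tree before decomposing it (decompose() destroys the tags, so nothing should keep using them afterwards).

    import gc

    from bs4 import BeautifulSoup  # assumes bs4 and html5lib are importable (bundled under lib/ in this project)


    def parse_search_page(data):
        # Build the parse tree from the raw provider response.
        html = BeautifulSoup(data, features=["html5lib", "permissive"])

        torrent_table = html.find('table', attrs={'class': 'koptekst'})
        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

        # Copy out the plain values that are needed before the tree goes away.
        titles = [row.get_text(strip=True) for row in torrent_rows]

        # cleanup memory: break the tree's internal references ...
        html.decompose()
        # ... and collect the now-unreachable cycles straight away.
        gc.collect()

        return titles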
@@ -21,6 +21,7 @@ import traceback
 import datetime
 import urlparse
 import time
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets

@@ -175,6 +176,10 @@ class BitSoupProvider(generic.TorrentProvider):
 torrent_table = html.find('table', attrs={'class': 'koptekst'})
 torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrent_rows) < 2:
 logger.log(u"The Data returned from " + self.name + " do not contains any torrent",

@@ -21,6 +21,7 @@ import traceback
 import datetime
 import urlparse
 import time
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets

@@ -199,6 +200,11 @@ class FreshOnTVProvider(generic.TorrentProvider):
 
 torrent_table = html.find('table', attrs={'class': 'frame'})
 torrent_rows = torrent_table.findChildren('tr') if torrent_table else []
+
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrent_rows) < 2:
 logger.log(u"The Data returned from " + self.name + " do not contains any torrent",

@@ -22,6 +22,7 @@ import re
 import traceback
 import datetime
 import urlparse
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets

@@ -203,6 +204,10 @@ class HDTorrentsProvider(generic.TorrentProvider):
 #Get first entry in table
 entries = html.find_all('td', attrs={'align': 'center'})
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 if not entries:
 logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
 logger.DEBUG)

@@ -21,6 +21,7 @@ import re
 import traceback
 import datetime
 import urlparse
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets

@@ -182,6 +183,10 @@ class IPTorrentsProvider(generic.TorrentProvider):
 torrent_table = html.find('table', attrs={'class': 'torrents'})
 torrents = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrents) < 2:
 logger.log(u"The Data returned from " + self.name + " do not contains any torrent",

@@ -19,7 +19,7 @@
 
 from __future__ import with_statement
 
-import time
+import gc
 import sys
 import os
 import traceback

@@ -120,8 +120,12 @@ class KATProvider(generic.TorrentProvider):
 return None
 
 try:
-soup = BeautifulSoup(data, features=["html5lib", "permissive"])
-file_table = soup.find('table', attrs={'class': 'torrentFileList'})
+html = BeautifulSoup(data, features=["html5lib", "permissive"])
+file_table = html.find('table', attrs={'class': 'torrentFileList'})
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 if not file_table:
 return None

@@ -248,11 +252,15 @@ class KATProvider(generic.TorrentProvider):
 continue
 
 try:
-soup = BeautifulSoup(html, features=["html5lib", "permissive"])
+html = BeautifulSoup(html, features=["html5lib", "permissive"])
 
-torrent_table = soup.find('table', attrs={'class': 'data'})
+torrent_table = html.find('table', attrs={'class': 'data'})
 torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrent_rows) < 2:
 logger.log(u"The data returned from " + self.name + " does not contain any torrents",

@@ -25,6 +25,7 @@ import traceback
 import urllib, urlparse
 import re
 import datetime
+import gc
 
 import sickbeard
 import generic

@@ -152,11 +153,15 @@ class PublicHDProvider(generic.TorrentProvider):
 html = os.linesep.join([s for s in html.splitlines() if not optreg.search(s)])
 
 try:
-soup = BeautifulSoup(html, features=["html5lib", "permissive"])
+html = BeautifulSoup(html, features=["html5lib", "permissive"])
 
-torrent_table = soup.find('table', attrs={'id': 'torrbg'})
+torrent_table = html.find('table', attrs={'id': 'torrbg'})
 torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrent_rows) < 2:
 logger.log(u"The Data returned from " + self.name + " do not contains any torrent",

@@ -22,6 +22,7 @@ import re
 import traceback
 import datetime
 import urlparse
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets

@@ -203,6 +204,10 @@ class SCCProvider(generic.TorrentProvider):
 torrent_table = html.find('table', attrs={'id': 'torrents-table'})
 torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if at least one Release is found
 if len(torrent_rows) < 2:
 if html.title:

@@ -222,10 +227,14 @@ class SCCProvider(generic.TorrentProvider):
 url = all_urls[1]
 else:
 url = all_urls[0]
+
 title = link.string
 if re.search('\.\.\.', title):
 details_html = BeautifulSoup(self.getURL(self.url + "/" + link['href']))
 title = re.search('(?<=").+(?<!")', details_html.title.string).group(0)
+details_html.decompose()
+gc.collect()
+
 download_url = self.urls['download'] % url['href']
 id = int(link['href'].replace('details?id=', ''))
 seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)

@@ -21,6 +21,7 @@ import traceback
 import datetime
 import urlparse
 import time
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets

@@ -175,6 +176,10 @@ class TorrentBytesProvider(generic.TorrentProvider):
 torrent_table = html.find('table', attrs={'border': '1'})
 torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrent_rows) < 2:
 logger.log(u"The Data returned from " + self.name + " do not contains any torrent",

@@ -21,6 +21,7 @@ import traceback
 import datetime
 import urlparse
 import time
+import gc
 import sickbeard
 import generic
 from sickbeard.common import Quality, cpu_presets

@@ -178,6 +179,10 @@ class TorrentLeechProvider(generic.TorrentProvider):
 torrent_table = html.find('table', attrs={'id': 'torrenttable'})
 torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrent_rows) < 2:
 logger.log(u"The Data returned from " + self.name + " do not contains any torrent",

@@ -441,7 +441,6 @@ def get_xem_absolute_numbering_for_show(indexer_id, indexer):
 
 xem_refresh(indexer_id, indexer)
 
-
 result = {}
 myDB = db.DBConnection()
 rows = myDB.select(

@@ -227,11 +227,6 @@ class TVShow(object):
 
 def getEpisode(self, season=None, episode=None, file=None, noCreate=False, absolute_number=None, forceUpdate=False):
 
-# Load XEM data to DB for show
-sickbeard.scene_numbering.xem_refresh(self.indexerid, self.indexer, force=forceUpdate)
-
-ep = None
-
 # if we get an anime get the real season and episode
 if self.is_anime and absolute_number and not season and not episode:
 myDB = db.DBConnection()

@@ -269,21 +264,23 @@ class TVShow(object):
 else:
 ep = TVEpisode(self, season, episode)
 
-# get scene absolute numbering
-ep.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering(self.indexerid,
-self.indexer,
-ep.absolute_number)
-
-# get scene season and episode numbering
-ep.scene_season, ep.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.indexerid,
-self.indexer,
-season, episode)
-
 if ep != None:
+# Load XEM data to DB for show
+sickbeard.scene_numbering.xem_refresh(self.indexerid, self.indexer, force=forceUpdate)
+
+# get scene absolute numbering
+ep.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering(self.indexerid,
+self.indexer,
+ep.absolute_number)
+
+# get scene season and episode numbering
+ep.scene_season, ep.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.indexerid,
+self.indexer,
+season, episode)
+
 self.episodes[season][episode] = ep
 
-epObj = self.episodes[season][episode]
-return epObj
+return self.episodes[season][episode]
 
 def should_update(self, update_date=datetime.date.today()):

@@ -22,6 +22,7 @@ from __future__ import with_statement
 import unittest
 import sys, os.path
 import urlparse
+import gc
 
 sys.path.append(os.path.abspath('..'))
 sys.path.append(os.path.abspath('../lib'))

@@ -40,11 +41,15 @@ class TorrentBasicTests(test.SickbeardTestDBCase):
 if not html:
 return
 
-soup = BeautifulSoup(html, features=["html5lib", "permissive"])
+html = BeautifulSoup(html, features=["html5lib", "permissive"])
 
-torrent_table = soup.find('table', attrs={'class': 'data'})
+torrent_table = html.find('table', attrs={'class': 'data'})
 torrent_rows = torrent_table.find_all('tr') if torrent_table else []
 
+# cleanup memory
+html.decompose()
+gc.collect()
+
 #Continue only if one Release is found
 if len(torrent_rows) < 2:
 print(u"The data returned does not contain any torrents")