mirror of
https://github.com/SickGear/SickGear.git
synced 2025-01-05 17:43:37 +00:00
Fixes to help with bringing down the CPU usage of SickRage
This commit is contained in:
parent
9a23bfd0f5
commit
f9052ca2b2
18 changed files with 43 additions and 8 deletions
|
@ -97,6 +97,9 @@ class NameParser(object):
|
|||
return None
|
||||
|
||||
for (cur_regex_name, cur_regex) in self.compiled_regexes:
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
match = cur_regex.match(name)
|
||||
|
||||
if not match:
|
||||
|
|
|
@ -337,6 +337,8 @@ class BTNCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -278,6 +278,8 @@ class GenericProvider:
|
|||
for ep_obj in searchItems:
|
||||
for item in searchItems[ep_obj]:
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
(title, url) = self._get_title_and_url(item)
|
||||
|
||||
quality = self.getQuality(item)
|
||||
|
|
|
@ -226,6 +226,8 @@ class HDBitsCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
ql.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(ql)
|
||||
|
||||
|
|
|
@ -361,6 +361,8 @@ class HDTorrentsCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -306,6 +306,8 @@ class IPTorrentsCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -436,6 +436,8 @@ class KATCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -17,11 +17,9 @@
|
|||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import urllib
|
||||
import email.utils
|
||||
import time
|
||||
import datetime
|
||||
import re
|
||||
import os
|
||||
import copy
|
||||
|
||||
try:
|
||||
import xml.etree.cElementTree as etree
|
||||
|
@ -274,7 +272,6 @@ class NewznabCache(tvcache.TVCache):
|
|||
def _checkAuth(self, data):
|
||||
return self.provider._checkAuthFromData(data)
|
||||
|
||||
|
||||
def updateCache(self):
|
||||
if not self.shouldUpdate():
|
||||
return
|
||||
|
@ -300,6 +297,8 @@ class NewznabCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
ql.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(ql)
|
||||
|
||||
|
|
|
@ -355,6 +355,8 @@ class NextGenCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -328,6 +328,8 @@ class PublicHDCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
ql.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(ql)
|
||||
|
||||
|
|
|
@ -348,6 +348,8 @@ class SCCCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -284,6 +284,8 @@ class SpeedCDCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
ql.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(ql)
|
||||
|
||||
|
|
|
@ -417,6 +417,8 @@ class ThePirateBayCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -309,6 +309,8 @@ class TorrentDayCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -307,6 +307,8 @@ class TorrentLeechCache(tvcache.TVCache):
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
|
|
@ -61,7 +61,7 @@ class Scheduler:
|
|||
def runAction(self):
|
||||
|
||||
while True:
|
||||
time.sleep(1)
|
||||
|
||||
currentTime = datetime.datetime.now()
|
||||
|
||||
if currentTime - self.lastRun > self.cycleTime:
|
||||
|
@ -79,3 +79,5 @@ class Scheduler:
|
|||
self.abort = False
|
||||
self.thread = None
|
||||
return
|
||||
|
||||
time.sleep(1)
|
|
@ -18,7 +18,7 @@
|
|||
|
||||
from __future__ import with_statement
|
||||
|
||||
import datetime
|
||||
import time
|
||||
import traceback
|
||||
import threading
|
||||
|
||||
|
@ -79,8 +79,8 @@ class SearchQueue(generic_queue.GenericQueue):
|
|||
for result in item.results:
|
||||
# just use the first result for now
|
||||
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
|
||||
status = search.snatchEpisode(result)
|
||||
item.success = status
|
||||
item.success = search.snatchEpisode(result)
|
||||
time.sleep(2)
|
||||
generic_queue.QueueItem.finish(item)
|
||||
|
||||
class ManualSearchQueueItem(generic_queue.QueueItem):
|
||||
|
|
|
@ -124,6 +124,8 @@ class TVCache():
|
|||
if ci is not None:
|
||||
cl.append(ci)
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
myDB = self._getDB()
|
||||
myDB.mass_action(cl)
|
||||
|
||||
|
@ -372,6 +374,9 @@ class TVCache():
|
|||
|
||||
# for each cache entry
|
||||
for curResult in sqlResults:
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
# skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
|
||||
if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
|
||||
continue
|
||||
|
|
Loading…
Reference in a new issue