Mirror of https://github.com/SickGear/SickGear.git
Fixes to help with bringing down the CPU usage of SickRage
This commit is contained in:
parent
9a23bfd0f5
commit
f9052ca2b2
18 changed files with 43 additions and 8 deletions
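Every changed file applies the same pattern: the tight per-item loops (regex matching in the name parser, cache inserts in each provider's updateCache, result handling in GenericProvider and TVCache) now sleep briefly between iterations so a large backlog no longer pins a CPU core. A minimal sketch of the idea, not the project's actual code; drain, queue_items and process_item are placeholder names:

import time

def drain(queue_items, process_item):
    # Handle every queued item, yielding the CPU briefly between items so a
    # long backlog is processed as a steady trickle instead of a busy burst.
    results = []
    for item in queue_items:
        results.append(process_item(item))
        time.sleep(0.05)  # the same 50 ms pause this commit adds to the hot loops
    return results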
@@ -97,6 +97,9 @@ class NameParser(object):
             return None
 
         for (cur_regex_name, cur_regex) in self.compiled_regexes:
+
+            time.sleep(0.05)
+
             match = cur_regex.match(name)
 
             if not match:
@@ -337,6 +337,8 @@ class BTNCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -278,6 +278,8 @@ class GenericProvider:
         for ep_obj in searchItems:
             for item in searchItems[ep_obj]:
 
+                time.sleep(0.05)
+
                 (title, url) = self._get_title_and_url(item)
 
                 quality = self.getQuality(item)
@@ -226,6 +226,8 @@ class HDBitsCache(tvcache.TVCache):
                 if ci is not None:
                     ql.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(ql)
 
@@ -361,6 +361,8 @@ class HDTorrentsCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -306,6 +306,8 @@ class IPTorrentsCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -436,6 +436,8 @@ class KATCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -17,11 +17,9 @@
 # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
 
 import urllib
-import email.utils
+import time
 import datetime
-import re
 import os
-import copy
 
 try:
     import xml.etree.cElementTree as etree
@@ -274,7 +272,6 @@ class NewznabCache(tvcache.TVCache):
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)
 
-
     def updateCache(self):
         if not self.shouldUpdate():
             return
@@ -300,6 +297,8 @@ class NewznabCache(tvcache.TVCache):
                 if ci is not None:
                     ql.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(ql)
 
@@ -355,6 +355,8 @@ class NextGenCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -328,6 +328,8 @@ class PublicHDCache(tvcache.TVCache):
                 if ci is not None:
                     ql.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(ql)
 
@@ -348,6 +348,8 @@ class SCCCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -284,6 +284,8 @@ class SpeedCDCache(tvcache.TVCache):
                 if ci is not None:
                     ql.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(ql)
 
@@ -417,6 +417,8 @@ class ThePirateBayCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -309,6 +309,8 @@ class TorrentDayCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -307,6 +307,8 @@ class TorrentLeechCache(tvcache.TVCache):
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -61,7 +61,7 @@ class Scheduler:
     def runAction(self):
 
         while True:
-            time.sleep(1)
+
             currentTime = datetime.datetime.now()
 
             if currentTime - self.lastRun > self.cycleTime:
@@ -79,3 +79,5 @@ class Scheduler:
                     self.abort = False
                     self.thread = None
                     return
+
+            time.sleep(1)
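Taken together, the two scheduler hunks appear to move the one-second pause from the top of the polling loop to the end of each iteration, so runAction still wakes roughly once per second but never completes a pass without sleeping. A simplified reconstruction of the resulting loop; the constructor and the job body are assumptions, only the attribute names come from the hunks:

import datetime
import time

class Scheduler:
    def __init__(self, cycle_seconds=60):
        # Assumed setup: the real class also carries the action to run, a name, etc.
        self.cycleTime = datetime.timedelta(seconds=cycle_seconds)
        self.lastRun = datetime.datetime.now()
        self.abort = False

    def runAction(self):
        while True:
            currentTime = datetime.datetime.now()

            if currentTime - self.lastRun > self.cycleTime:
                self.lastRun = currentTime
                # ... run the scheduled action here ...
                if self.abort:
                    self.abort = False
                    return

            time.sleep(1)  # sleep at the bottom of every pass so the loop never spins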
@@ -18,7 +18,7 @@
 
 from __future__ import with_statement
 
-import datetime
+import time
 import traceback
 import threading
 
@@ -79,8 +79,8 @@ class SearchQueue(generic_queue.GenericQueue):
                 for result in item.results:
                     # just use the first result for now
                     logger.log(u"Downloading " + result.name + " from " + result.provider.name)
-                    status = search.snatchEpisode(result)
-                    item.success = status
+                    item.success = search.snatchEpisode(result)
+                    time.sleep(2)
                 generic_queue.QueueItem.finish(item)
 
 class ManualSearchQueueItem(generic_queue.QueueItem):
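The search-queue hunk folds the snatch status straight into item.success and pauses for two seconds between downloads. A hedged sketch of that loop shape; download_results, snatch_episode and log are placeholders standing in for the project's own queue item handling, search.snatchEpisode and logger:

import time

def download_results(item, snatch_episode, log):
    # Snatch each search result in turn, recording the outcome on the queue item
    # and pausing between downloads so back-to-back snatches do not hog the CPU.
    for result in item.results:
        log(u"Downloading " + result.name + " from " + result.provider.name)
        item.success = snatch_episode(result)
        time.sleep(2)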
@@ -124,6 +124,8 @@ class TVCache():
                 if ci is not None:
                     cl.append(ci)
 
+                time.sleep(0.05)
+
         myDB = self._getDB()
         myDB.mass_action(cl)
 
@@ -372,6 +374,9 @@ class TVCache():
 
         # for each cache entry
         for curResult in sqlResults:
+
+            time.sleep(0.05)
+
             # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
             if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
                 continue