Change increase namecache size and fix deleting items from it when at capacity.

Add thread lock to NameParserCache.
Add exception handling for add to cache.
Change increase NameParserCache to 1000 entries.
Change NameParserCache to OrderedDefaultdict.
Add move_to_end to OrderedDefaultdict class (backported from python 3).
Add first_key, last_key to OrderedDefaultdict.
Change use much faster first_key in NameParserCache.
Change remove import of python pre 2.7 OrderedDict.
This commit is contained in:
Prinz23 2018-04-11 15:13:20 +01:00 committed by JackDandy
parent 0c276e81fb
commit c1d3487a21
11 changed files with 94 additions and 67 deletions
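
Background for the eviction change (an illustrative sketch only, not part of the commit): under Python 2 the old cleanup expression `self._previous_parsed.keys()[0]` builds a list of every cached key just to read the first one, while the new `first_key()` reads the head of OrderedDict's internal doubly linked list directly. A plain OrderedDict stands in for OrderedDefaultdict here:

from collections import OrderedDict

cache = OrderedDict((n, str(n)) for n in range(1000))

# Old eviction style: materialise every key only to read index 0 - O(n) per delete.
oldest_slow = list(cache)[0]

# What first_key() does: follow the first link of the underlying doubly linked list
# (self._OrderedDict__root[1][2]) - O(1). next(iter(...)) is the portable stand-in here.
oldest_fast = next(iter(cache))

assert oldest_slow == oldest_fast == 0
del cache[oldest_fast]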

View file

@ -1,5 +1,6 @@
### 0.16.0 (2018-xx-xx xx:xx:xx UTC)
* Change increase namecache size and fix deleting items from it when at capacity
* Change improve security with cross-site request forgery (xsrf) protection on web forms
* Change improve security by sending header flags httponly and secure with cookies
* Change improve security with DNS rebinding prevention, set "Allowed browser hostnames" at config/General/Web Interface

View file

@ -15,18 +15,16 @@
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import datetime
import os
from collections import OrderedDict
import sickbeard
from sickbeard.common import Quality
from unidecode import unidecode
try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
import datetime
import os
import re
import sickbeard
class SearchResult(object):
@ -343,6 +341,35 @@ class OrderedDefaultdict(OrderedDict):
        args = (self.default_factory,) if self.default_factory else ()
        return self.__class__, args, None, None, self.iteritems()

    # backport from python 3
    def move_to_end(self, key, last=True):
        """Move an existing element to the end (or beginning if last==False).

        Raises KeyError if the element does not exist.
        When last=True, acts like a fast version of self[key]=self.pop(key).
        """
        link_prev, link_next, key = link = self._OrderedDict__map[key]
        link_prev[1] = link_next
        link_next[0] = link_prev
        root = self._OrderedDict__root
        if last:
            last = root[0]
            link[0] = last
            link[1] = root
            last[1] = root[0] = link
        else:
            first = root[1]
            link[0] = root
            link[1] = first
            root[1] = first[0] = link

    def first_key(self):
        return self._OrderedDict__root[1][2]

    def last_key(self):
        return self._OrderedDict__root[0][2]
class ImageUrlList(list):
def __init__(self, max_age=30):

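For reference, a minimal usage sketch of the OrderedDefaultdict methods added in this file (assuming the import path sickbeard.classes used elsewhere in this commit):

from sickbeard.classes import OrderedDefaultdict

d = OrderedDefaultdict()
d['a'] = 1
d['b'] = 2
d['c'] = 3

assert 'a' == d.first_key()      # oldest insertion, i.e. the next eviction candidate
assert 'c' == d.last_key()       # newest insertion

d.move_to_end('a')               # promote 'a' to most recently used
assert 'a' == d.last_key()
assert 'b' == d.first_key()

d.move_to_end('c', last=False)   # last=False moves an entry to the front instead
assert 'c' == d.first_key()
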
View file

@ -16,8 +16,9 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from sickbeard import db
from collections import OrderedDict
from sickbeard import db
import re
MIN_DB_VERSION = 1

View file

@ -23,6 +23,7 @@ import os
import os.path
import re
import time
import threading
import regexes
import sickbeard
@ -35,6 +36,7 @@ except ImportError:
from sickbeard import logger, helpers, scene_numbering, common, scene_exceptions, encodingKludge as ek, db
from sickbeard.exceptions import ex
from sickbeard.classes import OrderedDefaultdict
class NameParser(object):
@ -706,20 +708,31 @@ class ParseResult(object):
class NameParserCache(object):
    _previous_parsed = {}
    _cache_size = 100

    def __init__(self):
        super(NameParserCache, self).__init__()
        self._previous_parsed = OrderedDefaultdict()
        self._cache_size = 1000
        self.lock = threading.Lock()

    def add(self, name, parse_result):
        self._previous_parsed[name] = parse_result
        _current_cache_size = len(self._previous_parsed)
        if _current_cache_size > self._cache_size:
            for i in range(_current_cache_size - self._cache_size):
                del self._previous_parsed[self._previous_parsed.keys()[0]]
        with self.lock:
            self._previous_parsed[name] = parse_result
            _current_cache_size = len(self._previous_parsed)
            if _current_cache_size > self._cache_size:
                key = None
                for i in range(_current_cache_size - self._cache_size):
                    try:
                        key = self._previous_parsed.first_key()
                        del self._previous_parsed[key]
                    except KeyError:
                        logger.log('Could not remove old NameParserCache entry: %s' % key, logger.DEBUG)

    def get(self, name):
        if name in self._previous_parsed:
            logger.log('Using cached parse result for: ' + name, logger.DEBUG)
            return self._previous_parsed[name]
        with self.lock:
            if name in self._previous_parsed:
                logger.log('Using cached parse result for: ' + name, logger.DEBUG)
                self._previous_parsed.move_to_end(name)
                return self._previous_parsed[name]
name_parser_cache = NameParserCache()

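A short sketch of the reworked cache behaviour (the release name is a placeholder and object() stands in for a real ParseResult):

from sickbeard.name_parser.parser import name_parser_cache

release = 'Show.Name.S01E01.720p.HDTV.x264-GRP'   # placeholder
result = object()                                  # stand-in for a ParseResult

# add() now takes self.lock, stores the entry, and once the dict exceeds 1000 items
# evicts from first_key() until it is back at capacity.
name_parser_cache.add(release, result)

# get() also takes the lock; a hit logs, promotes the key with move_to_end() and returns it.
assert name_parser_cache.get(release) is result
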
View file

@ -15,10 +15,8 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
from collections import OrderedDict
import re
import traceback

View file

@ -15,10 +15,8 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
from collections import OrderedDict
import re
import traceback

View file

@ -15,10 +15,8 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
from collections import OrderedDict
import re
import traceback

View file

@ -15,10 +15,8 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
from collections import OrderedDict
import re
import traceback

View file

@ -17,26 +17,28 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import time
import sickbeard
import datetime
import re
import urllib
from collections import OrderedDict
from math import ceil
from sickbeard.sbdatetime import sbdatetime
from . import generic
from sickbeard import helpers, logger, tvcache, classes, db
from sickbeard.common import neededQualities, Quality, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED
from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.indexers.indexer_config import *
import datetime
import re
import time
import urllib
import sickbeard
from io import BytesIO
from lib.dateutil import parser
from sickbeard.network_timezones import sb_timezone
from . import generic
from sickbeard import classes, db, helpers, logger, tvcache
from sickbeard.common import neededQualities, Quality, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST
from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.helpers import tryInt
from sickbeard.indexers.indexer_config import *
from sickbeard.network_timezones import sb_timezone
from sickbeard.sbdatetime import sbdatetime
from sickbeard.search import get_aired_in_season, get_wanted_qualities
from sickbeard.show_name_helpers import get_show_names
from sickbeard.search import get_wanted_qualities, get_aired_in_season
try:
    from lxml import etree
@ -46,11 +48,6 @@ except ImportError:
    except ImportError:
        import xml.etree.ElementTree as etree

try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
class NewznabConstants:
SEARCH_TEXT = -100
@ -73,7 +70,7 @@ class NewznabConstants:
r'^BoxHD$': CAT_HD,
r'^UHD$': CAT_UHD,
r'^4K$': CAT_UHD,
#r'^HEVC$': CAT_HEVC,
# r'^HEVC$': CAT_HEVC,
r'^WEB.?DL$': CAT_WEBDL}
providerToIndexerMapping = {'tvdbid': INDEXER_TVDB,
@ -101,8 +98,8 @@ class NewznabConstants:
class NewznabProvider(generic.NZBProvider):
    def __init__(self, name, url, key='', cat_ids=None, search_mode=None,
                 search_fallback=False, enable_recentsearch=False, enable_backlog=False, enable_scheduled_backlog=False):
    def __init__(self, name, url, key='', cat_ids=None, search_mode=None, search_fallback=False,
                 enable_recentsearch=False, enable_backlog=False, enable_scheduled_backlog=False):
        generic.NZBProvider.__init__(self, name, True, False)
        self.url = url
@ -235,8 +232,8 @@ class NewznabProvider(generic.NZBProvider):
        limit = xml_caps.find('.//limits')
        if None is not limit:
            l = helpers.tryInt(limit.get('max'), 100)
            self._limits = (100, l)[l >= 100]
            lim = helpers.tryInt(limit.get('max'), 100)
            self._limits = (100, lim)[lim >= 100]

        try:
            for category in xml_caps.iter('category'):

View file

@ -15,10 +15,8 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
from collections import OrderedDict
import re
import traceback

View file

@ -15,10 +15,8 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
try:
    from collections import OrderedDict
except ImportError:
    from requests.compat import OrderedDict
from collections import OrderedDict
import re
import time
import traceback