Merge pull request #798 from JackDandy/feature/ChangeImageCacheSecurity

Change improve security of cached image use.
JackDandy authored 2016-10-06 21:25:03 +01:00, committed by GitHub
commit 171a981da2
5 changed files with 57 additions and 9 deletions

View file

@@ -172,7 +172,7 @@
* Fix status reset of a snatched, downloaded, or archived episode when its date is set to never (no date) on the info
source and there is no media file
* Change only show unaired episodes on Manage/Backlog Overview and Manage/Episode Status Management where relevant
-* Change locally cache "Add from Trakt" show posters
+* Change locally cache Trakt/IMDb/Anime show cards
* Change allow pp to replace files with a repack or proper of same quality
* Fix ensure downloaded eps are not shown on episode view
* Fix allow propers to pp when show marked upgrade once
@@ -199,6 +199,8 @@
* Change use legacy tzlocal() if new gettz fails to create
* Change load cached images on demand
* Change add rate limit handler for info source
+* Change improve security of cached image use
+* Change add helper function to validate acceptable image file extension
### 0.11.16 (2016-10-16 17:30:00 UTC)

View file

@@ -37,7 +37,7 @@ sys.path.insert(1, os.path.abspath('../lib'))
from sickbeard import helpers, encodingKludge as ek
from sickbeard import db, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \
scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \
-show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper
+show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax
from sickbeard.common import SD, SKIPPED
from sickbeard.databases import mainDB, cache_db, failed_db
@@ -488,6 +488,8 @@ else:
COOKIE_SECRET = base64.b64encode(uuid.uuid4().bytes + uuid.uuid4().bytes)
+CACHE_IMAGE_URL_LIST = classes.ImageUrlList()
__INITIALIZED__ = False
@@ -545,7 +547,7 @@ def initialize(consoleLogging=True):
ANIME_DEFAULT, NAMING_ANIME, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
SCENE_DEFAULT, BACKLOG_DAYS, SEARCH_UNAIRED, UNAIRED_RECENT_SEARCH_ONLY, ANIME_TREAT_AS_HDTV, \
COOKIE_SECRET, USE_IMDB_INFO, IMDB_ACCOUNTS, DISPLAY_BACKGROUND, DISPLAY_BACKGROUND_TRANSPARENT, DISPLAY_ALL_SEASONS, \
-SHOW_TAGS, DEFAULT_SHOW_TAG, SHOWLIST_TAGVIEW, background_mapping_task
+SHOW_TAGS, DEFAULT_SHOW_TAG, SHOWLIST_TAGVIEW, background_mapping_task, CACHE_IMAGE_URL_LIST
if __INITIALIZED__:
return False
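The whitelist is a single module-level instance, so every handler reaches the same object through the package namespace. A minimal sketch of that shared use (the example URL is illustrative only):

import sickbeard

# Any UI handler can register a remote URL on the shared whitelist...
sickbeard.CACHE_IMAGE_URL_LIST.add_url('http://example.com/poster.jpg')
# ...and a later request can test membership against the very same instance.
print('http://example.com/poster.jpg' in sickbeard.CACHE_IMAGE_URL_LIST)  # True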

View file

@@ -229,6 +229,7 @@ class UIError():
        self.message = message
        self.time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')


class OrderedDefaultdict(OrderedDict):
    def __init__(self, *args, **kwargs):
        if not args:
@@ -249,3 +250,36 @@ class OrderedDefaultdict(OrderedDict):
    def __reduce__(self): # optional, for pickle support
        args = (self.default_factory,) if self.default_factory else ()
        return self.__class__, args, None, None, self.iteritems()
+
+
+class ImageUrlList(list):
+    def __init__(self, iterable=None, max_age=30):
+        super(ImageUrlList, self).__init__()
+        self.max_age = max_age
+
+    def add_url(self, url):
+        self.remove_old()
+        for x in self:
+            if isinstance(x, (tuple, list)) and len(x) == 2 and url == x[0]:
+                x = (x[0], datetime.datetime.now())
+                return
+        self.append((url, datetime.datetime.now()))
+
+    def remove_old(self):
+        age_limit = datetime.datetime.now() - datetime.timedelta(minutes=self.max_age)
+        self[:] = [x for x in self if isinstance(x, (tuple, list)) and len(x) == 2 and x[1] > age_limit]
+
+    def __repr__(self):
+        return str([x[0] for x in self if isinstance(x, (tuple, list)) and len(x) == 2])
+
+    def __contains__(self, y):
+        for x in self:
+            if isinstance(x, (tuple, list)) and len(x) == 2 and y == x[0]:
+                return True
+        return False
+
+    def remove(self, x):
+        for v in self:
+            if isinstance(v, (tuple, list)) and len(v) == 2 and v[0] == x:
+                super(ImageUrlList, self).remove(v)
+                break
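For orientation, a small usage sketch of the whitelist semantics added above; the example URL is made up and the snippet assumes the ImageUrlList class from this hunk is in scope:

urls = ImageUrlList(max_age=30)  # entries are treated as stale 30 minutes after being added

urls.add_url('http://img7.anidb.net/pics/anime/12345.jpg')
print('http://img7.anidb.net/pics/anime/12345.jpg' in urls)  # True: __contains__ matches the url part of each (url, timestamp) pair
print('http://example.com/poster.jpg' in urls)               # False: unknown urls are not whitelisted

# add_url() calls remove_old() first, so stale pairs are pruned each time a url is
# registered; re-adding a url that is already present leaves the list unchanged.
urls.add_url('http://img7.anidb.net/pics/anime/12345.jpg')
print(len(urls))  # 1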

View file

@@ -143,6 +143,15 @@ def has_media_ext(filename):
    return (None is re.search('extras?$', sep_file[0], re.I)) and (sep_file[2].lower() in mediaExtensions)
+
+
+def has_image_ext(filename):
+    try:
+        if ek.ek(os.path.splitext, filename)[1].lower() in ['.bmp', '.gif', '.jpeg', '.jpg', '.png', '.webp']:
+            return True
+    except (StandardError, Exception):
+        pass
+    return False


def is_first_rar_volume(filename):
    return None is not re.search('(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)', filename)
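A quick illustration of the new helper; the file names below are invented and the snippet assumes has_image_ext() as defined above:

print(has_image_ext('poster.JPG'))   # True: the extension check is case-insensitive
print(has_image_ext('banner.webp'))  # True: '.webp' is on the accepted list
print(has_image_ext('show.nfo'))     # False: not an image extension
print(has_image_ext('README'))       # False: no extension at all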

View file

@@ -46,7 +46,7 @@ from sickbeard.common import Quality, Overview, statusStrings, qualityPresetStri
from sickbeard.common import SNATCHED, UNAIRED, IGNORED, ARCHIVED, WANTED, FAILED, SKIPPED, DOWNLOADED, SNATCHED_BEST, SNATCHED_PROPER
from sickbeard.common import SD, HD720p, HD1080p
from sickbeard.exceptions import ex
-from sickbeard.helpers import remove_article, starify
+from sickbeard.helpers import has_image_ext, remove_article, starify
from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_TVRAGE
from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering, get_scene_numbering_for_show, \
get_xem_numbering_for_show, get_scene_absolute_numbering_for_show, get_xem_absolute_numbering_for_show, \
@@ -2657,6 +2657,7 @@ class NewHomeAddShows(Home):
img_uri = 'http://img7.anidb.net/pics/anime/%s' % image
images = dict(poster=dict(thumb='imagecache?path=anidb&source=%s' % img_uri))
+sickbeard.CACHE_IMAGE_URL_LIST.add_url(img_uri)
votes = rating = 0
counts = anime.find('./ratings/permanent')
@@ -2823,6 +2824,7 @@
img_uri = img_uri.replace(match.group(), ''.join(
[str(y) for x in map(None, parts, scaled) for y in x if y is not None]))
images = dict(poster=dict(thumb='imagecache?path=imdb&source=%s' % img_uri))
+sickbeard.CACHE_IMAGE_URL_LIST.add_url(img_uri)
filtered.append(dict(
premiered=dt_ordinal,
@@ -3058,6 +3060,7 @@
img_uri = item.get('show', {}).get('images', {}).get('poster', {}).get('thumb', {}) or ''
if img_uri:
images = dict(poster=dict(thumb='imagecache?path=trakt/poster/thumb&source=%s' % img_uri))
+sickbeard.CACHE_IMAGE_URL_LIST.add_url(img_uri)
filtered.append(dict(
premiered=dt_ordinal,
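The three webserve hunks above follow one pattern: wherever a remote poster URI is rewritten into a local imagecache link, that URI is also registered on the whitelist so the CachedImages handler will later accept it. A condensed sketch of the pattern (the helper name cache_image_link is illustrative, not part of the commit):

def cache_image_link(path, img_uri):
    # Whitelist the remote URI so CachedImages will agree to download it on demand...
    sickbeard.CACHE_IMAGE_URL_LIST.add_url(img_uri)
    # ...and return the local proxy URL that the browser is actually given.
    return 'imagecache?path=%s&source=%s' % (path, img_uri)

images = dict(poster=dict(thumb=cache_image_link('trakt/poster/thumb', img_uri)))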
@@ -5598,8 +5601,6 @@
if not sql_results:
sql_results = []
t = PageTemplate(headers=self.request.headers, file='cache.tmpl')
t.cacheResults = sql_results
@@ -5613,7 +5614,8 @@
file_name = ek.ek(os.path.basename, source)
static_image_path = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images', path, file_name)
static_image_path = ek.ek(os.path.abspath, static_image_path.replace('\\', '/'))
-if not ek.ek(os.path.isfile, static_image_path) and source is not None:
+if not ek.ek(os.path.isfile, static_image_path) and source is not None and has_image_ext(file_name) \
+    and source in sickbeard.CACHE_IMAGE_URL_LIST:
basepath = ek.ek(os.path.dirname, static_image_path)
helpers.make_dirs(basepath)
if not helpers.download_file(source, static_image_path) and source.find('trakt.us'):
@@ -5624,4 +5626,3 @@
else:
helpers.set_file_timestamp(static_image_path, min_age=3, new_time=None)
self.redirect('cache/images/%s/%s' % (path, file_name))
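In summary, the reworked condition means CachedImages only fetches a remote image when every check below passes; this is a restatement of the handler logic above, not additional code from the commit:

# Sketch of the gate applied before a remote image is pulled into the local cache
# (names follow the handler above).
wanted = (
    not ek.ek(os.path.isfile, static_image_path)   # not already cached on disk
    and source is not None                         # a remote source url was supplied
    and has_image_ext(file_name)                   # target name carries an allowed image extension
    and source in sickbeard.CACHE_IMAGE_URL_LIST   # url was whitelisted by a UI handler beforehand
)
if wanted:
    helpers.make_dirs(ek.ek(os.path.dirname, static_image_path))
    helpers.download_file(source, static_image_path)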