From 11dfd66efdac36ee8db46d7bf104abffd7d45ff6 Mon Sep 17 00:00:00 2001 From: Prinz23 Date: Wed, 24 Jan 2018 10:30:32 +0000 Subject: [PATCH 1/2] Fix ensure url unicode shownames are correctly 'UTF-8' and urlencoded. Refactor shownames list into generic provider method, reusable in multiple providers. Add remove English at the end of release names (drunkenslug). --- sickbeard/providers/generic.py | 9 ++++++++- sickbeard/providers/newznab.py | 18 ++++++------------ 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index 1658eb3a..a472a81e 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -27,6 +27,7 @@ import re import time import urlparse import threading +import urllib from urllib import quote_plus import zlib from base64 import b16encode, b32decode @@ -39,7 +40,7 @@ from hachoir_parser import guessParser from hachoir_core.error import HachoirError from hachoir_core.stream import FileInputStream -from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek +from sickbeard import helpers, classes, logger, db, tvcache, scene_exceptions, encodingKludge as ek from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT from sickbeard.exceptions import SickBeardException, AuthException, ex from sickbeard.helpers import maybe_plural, remove_file_failed @@ -165,6 +166,12 @@ class GenericProvider: return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, session=self.session, json=json, hooks=dict(response=self.cb_response)) + @staticmethod + def get_show_names_url_encoded(ep_obj, spacer='.'): + return [urllib.quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in list( + set([helpers.sanitizeSceneName(a) for a in + scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))] + def download_result(self, result): """ Save the result to disk. diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index c6fd1a60..656da53c 100755 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -27,7 +27,7 @@ from math import ceil from sickbeard.sbdatetime import sbdatetime from . import generic -from sickbeard import helpers, logger, scene_exceptions, tvcache, classes, db +from sickbeard import helpers, logger, tvcache, classes, db from sickbeard.common import neededQualities, Quality from sickbeard.exceptions import AuthException, MultipleShowObjectsException from sickbeard.indexers.indexer_config import * @@ -351,15 +351,12 @@ class NewznabProvider(generic.NZBProvider): use_id = True use_id and search_params.append(params) + spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.' # query search and exceptions - name_exceptions = list( - set([helpers.sanitizeSceneName(a) for a in - scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]])) + name_exceptions = self.get_show_names_url_encoded(ep_obj, spacer) - spacer = 'geek' in self.get_id() and ' ' or '.' for cur_exception in name_exceptions: params = base_params.copy() - cur_exception = cur_exception.replace('.', spacer) if 'q' in params: params['q'] = '%s%s%s' % (cur_exception, spacer, params['q']) search_params.append(params) @@ -408,17 +405,14 @@ class NewznabProvider(generic.NZBProvider): use_id = True use_id and search_params.append(params) + spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.' 
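The heart of the unicode fix is the get_show_names_url_encoded() helper added to generic.py above: on Python 2, urllib.quote_plus() raises a KeyError when handed a unicode string containing non-ASCII characters, so each name is byte-encoded as UTF-8 (with errors='replace') before quoting. A minimal standalone sketch of that step — the helper name and sample title below are illustrative only, not part of the patch:

from urllib import quote_plus

def encode_show_name(name, spacer='.'):
    # swap the canonical '.' spacer for the indexer-specific one (the newznab
    # code above picks ' ' for nzbgeek.info, '.' otherwise), then encode to
    # UTF-8 bytes so quote_plus() never sees raw unicode
    return quote_plus(name.replace('.', spacer).encode('utf-8', errors='replace'))

# quote_plus(u'Se\xf1ora.Acero') would raise KeyError: u'\xf1'
print encode_show_name(u'Se\xf1ora.Acero', spacer=' ')  # Se%C3%B1ora+Acero
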
# query search and exceptions - name_exceptions = list( - set([helpers.sanitizeSceneName(a) for a in - scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]])) + name_exceptions = self.get_show_names_url_encoded(ep_obj, spacer) - spacer = 'geek' in self.get_id() and ' ' or '.' if sickbeard.scene_exceptions.has_abs_episodes(ep_obj): search_params.append({'q': '%s%s%s' % (ep_obj.show.name, spacer, base_params['ep'])}) for cur_exception in name_exceptions: params = base_params.copy() - cur_exception = cur_exception.replace('.', spacer) params['q'] = cur_exception search_params.append(params) @@ -444,7 +438,7 @@ class NewznabProvider(generic.NZBProvider): r_found = True while r_found: r_found = False - for pattern, repl in ((r'(?i)-Obfuscated$', ''), (r'(?i)-postbot$', '')): + for pattern, repl in ((r'(?i)-Obfuscated$', ''), (r'(?i)-postbot$', ''), (r'(?i)[-.]English$', '')): if re.search(pattern, title): r_found = True title = re.sub(pattern, repl, title) From 4dea2ad022a6bc4d65f293678fc2012be4880ff9 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Wed, 24 Jan 2018 02:24:00 +0000 Subject: [PATCH 2/2] Add config/general/web interface/send security headers (default enabled). Change use bare minimum requests for all usenet_crawler search modes. Change use RSS endpoint for usenet_crawler in cache mode. Fix ensure remaining unicode shownames are correctly 'UTF-8' and url encoded. Fix omgwtf test of invalid auth, issue when enabling propers, and updating cache. Change refactor shownames list into reusable show_name_helper methods. Add season specific naming exceptions to nzb + btn. --- CHANGES.md | 11 +++++- .../interfaces/default/config_general.tmpl | 10 +++++ sickbeard/__init__.py | 5 ++- sickbeard/metadata/generic.py | 1 - sickbeard/providers/btn.py | 39 +++++-------------- sickbeard/providers/generic.py | 15 ++----- sickbeard/providers/newznab.py | 23 +++++++---- sickbeard/providers/omgwtfnzbs.py | 39 ++++++++++++------- sickbeard/show_name_helpers.py | 29 +++++++++++--- sickbeard/webapi.py | 2 + sickbeard/webserve.py | 4 ++ 11 files changed, 108 insertions(+), 70 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 98b534d9..12249c05 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,13 @@ -### 0.13.13 (2018-01-19 00:45:00 UTC) +### 0.13.14 (2018-01-25 16:20:00 UTC) + +* Add config/general/web interface/send security headers (default enabled) +* Fix usenet_crawler cache mode results +* Fix omgwtf test of invalid auth, issue when enabling propers, and updating cache +* Fix unicode shownames when searching +* Add season specific naming exceptions to nzb + btn + + +### 0.13.13 (2018-01-19 00:45:00 UTC) * Fix setting episode status when testing for if it should be deleted * Restrict setting newly added old episodes to WANTED to the last 90 days, older are set to SKIPPED diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl index 97abb763..3e924340 100644 --- a/gui/slick/interfaces/default/config_general.tmpl +++ b/gui/slick/interfaces/default/config_general.tmpl @@ -567,6 +567,16 @@ +
+ +
+ diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 07e44b79..b1e1d32a 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -130,6 +130,7 @@ WEB_IPV6 = None WEB_IPV64 = None HANDLE_REVERSE_PROXY = False +SEND_SECURITY_HEADERS = True PROXY_SETTING = None PROXY_INDEXERS = True @@ -587,7 +588,7 @@ def initialize(console_logging=True): HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \ DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, TIMEZONE_DISPLAY, \ WEB_USERNAME, WEB_PASSWORD, CALENDAR_UNPROTECTED, USE_API, API_KEY, WEB_PORT, WEB_LOG, \ - ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY + ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY, SEND_SECURITY_HEADERS # Gen Config/Advanced global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \ ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET @@ -788,6 +789,7 @@ def initialize(console_logging=True): HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', 'server.key') HANDLE_REVERSE_PROXY = bool(check_setting_int(CFG, 'General', 'handle_reverse_proxy', 0)) + SEND_SECURITY_HEADERS = bool(check_setting_int(CFG, 'General', 'send_security_headers', 1)) ROOT_DIRS = check_setting_str(CFG, 'General', 'root_dirs', '') if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS): @@ -1563,6 +1565,7 @@ def save_config(): new_config['General']['https_cert'] = HTTPS_CERT new_config['General']['https_key'] = HTTPS_KEY new_config['General']['handle_reverse_proxy'] = int(HANDLE_REVERSE_PROXY) + new_config['General']['send_security_headers'] = int(SEND_SECURITY_HEADERS) new_config['General']['use_nzbs'] = int(USE_NZBS) new_config['General']['use_torrents'] = int(USE_TORRENTS) new_config['General']['nzb_method'] = NZB_METHOD diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py index 08883cd9..88dac4cf 100644 --- a/sickbeard/metadata/generic.py +++ b/sickbeard/metadata/generic.py @@ -37,7 +37,6 @@ from sickbeard.metadata import helpers as metadata_helpers from sickbeard import logger from sickbeard import encodingKludge as ek from sickbeard.exceptions import ex -from sickbeard.show_name_helpers import allPossibleShowNames from sickbeard.indexers import indexer_config from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_TVDB_V1 diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 288da4ae..ca37af5a 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -24,6 +24,7 @@ from sickbeard import helpers, logger, scene_exceptions, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.exceptions import AuthException from sickbeard.helpers import tryInt +from sickbeard.show_name_helpers import get_show_names from lib.unidecode import unidecode try: @@ -293,20 +294,12 @@ class BTNProvider(generic.TorrentProvider): base_params['tvdb'] = ep_obj.show.indexerid base_params['series'] = ep_obj.show.name search_params.append(base_params) - # elif 2 == ep_obj.show.indexer: - # current_params['tvrage'] = ep_obj.show.indexerid - # search_params.append(current_params) - # else: - name_exceptions = list( - set([helpers.sanitizeSceneName(a) for a in - scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]])) - dedupe = [ep_obj.show.name.replace(' ', '.')] + + name_exceptions = get_show_names(ep_obj) for name in name_exceptions: - if name.replace(' ', '.') not in dedupe: - dedupe += 
[name.replace(' ', '.')] - series_param = base_params.copy() - series_param['series'] = name - search_params.append(series_param) + series_param = base_params.copy() + series_param['series'] = name + search_params.append(series_param) return [dict(Season=search_params)] @@ -318,7 +311,6 @@ class BTNProvider(generic.TorrentProvider): search_params = [] base_params = {'category': 'Episode'} - # episode if ep_obj.show.air_by_date or ep_obj.show.is_sports: date_str = str(ep_obj.airdate) @@ -333,27 +325,16 @@ class BTNProvider(generic.TorrentProvider): (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)] base_params['name'] = 'S%02dE%02d' % (season, episode) - # search if 1 == ep_obj.show.indexer: base_params['tvdb'] = ep_obj.show.indexerid base_params['series'] = ep_obj.show.name search_params.append(base_params) - # elif 2 == ep_obj.show.indexer: - # search_params['tvrage'] = ep_obj.show.indexerid - # to_return.append(search_params) - # else: - # add new query string for every exception - name_exceptions = list( - set([helpers.sanitizeSceneName(a) for a in - scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]])) - dedupe = [ep_obj.show.name.replace(' ', '.')] + name_exceptions = get_show_names(ep_obj) for name in name_exceptions: - if name.replace(' ', '.') not in dedupe: - dedupe += [name.replace(' ', '.')] - series_param = base_params.copy() - series_param['series'] = name - search_params.append(series_param) + series_param = base_params.copy() + series_param['series'] = name + search_params.append(series_param) return [dict(Episode=search_params)] diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index a472a81e..bef4274a 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -27,7 +27,6 @@ import re import time import urlparse import threading -import urllib from urllib import quote_plus import zlib from base64 import b16encode, b32decode @@ -40,12 +39,12 @@ from hachoir_parser import guessParser from hachoir_core.error import HachoirError from hachoir_core.stream import FileInputStream -from sickbeard import helpers, classes, logger, db, tvcache, scene_exceptions, encodingKludge as ek +from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT from sickbeard.exceptions import SickBeardException, AuthException, ex from sickbeard.helpers import maybe_plural, remove_file_failed from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException -from sickbeard.show_name_helpers import allPossibleShowNames +from sickbeard.show_name_helpers import get_show_names_all_possible class HaltParseException(SickBeardException): @@ -166,12 +165,6 @@ class GenericProvider: return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, session=self.session, json=json, hooks=dict(response=self.cb_response)) - @staticmethod - def get_show_names_url_encoded(ep_obj, spacer='.'): - return [urllib.quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in list( - set([helpers.sanitizeSceneName(a) for a in - scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))] - def download_result(self, result): """ Save the result to disk. 
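For context on the btn.py hunks earlier in this patch: the manual de-dupe list is dropped because get_show_names() (added to show_name_helpers later in this patch) already returns a de-duplicated, sanitized list, so the provider only clones its base parameter dict once per name. A rough standalone sketch of that loop, with an illustrative function name and sample values:

def build_episode_search_params(base_params, show_names):
    # one BTN API search entry per show name / scene exception; the names
    # arrive already de-duplicated, so no membership check is needed here
    search_params = []
    for name in show_names:
        series_param = base_params.copy()   # keep category/name/tvdb keys
        series_param['series'] = name       # vary only the series name
        search_params.append(series_param)
    return [dict(Episode=search_params)]

# e.g. build_episode_search_params({'category': 'Episode', 'name': 'S02E05'},
#                                  ['Show.Name', 'Show.Name.2015'])
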
@@ -962,9 +955,7 @@ class TorrentProvider(object, GenericProvider): search_params = [] crop = re.compile(r'([.\s])(?:\1)+') - for name in set(allPossibleShowNames(self.show)): - if process_name and getattr(self, 'scene', True): - name = helpers.sanitizeSceneName(name) + for name in get_show_names_all_possible(self.show, scenify=process_name and getattr(self, 'scene', True)): for detail in ep_detail: search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix] return search_params diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 656da53c..7c3cc1da 100755 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -35,6 +35,7 @@ from io import BytesIO from lib.dateutil import parser from sickbeard.network_timezones import sb_timezone from sickbeard.helpers import tryInt +from sickbeard.show_name_helpers import get_show_names from sickbeard.search import get_wanted_qualities, get_aired_in_season try: @@ -353,8 +354,7 @@ class NewznabProvider(generic.NZBProvider): spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.' # query search and exceptions - name_exceptions = self.get_show_names_url_encoded(ep_obj, spacer) - + name_exceptions = get_show_names(ep_obj, spacer) for cur_exception in name_exceptions: params = base_params.copy() if 'q' in params: @@ -407,8 +407,7 @@ class NewznabProvider(generic.NZBProvider): spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.' # query search and exceptions - name_exceptions = self.get_show_names_url_encoded(ep_obj, spacer) - + name_exceptions = get_show_names(ep_obj, spacer) if sickbeard.scene_exceptions.has_abs_episodes(ep_obj): search_params.append({'q': '%s%s%s' % (ep_obj.show.name, spacer, base_params['ep'])}) for cur_exception in name_exceptions: @@ -627,8 +626,12 @@ class NewznabProvider(generic.NZBProvider): if v in self.caps]), 'offset': 0} + uc_only = all([re.search('(?i)usenet_crawler', self.get_id())]) + base_params_uc = {'num': self.limits, 'dl': '1', 'i': '64660'} + if isinstance(api_key, basestring) and api_key not in ('0', ''): base_params['apikey'] = api_key + base_params_uc['r'] = api_key results, n_spaces = [], {} total, cnt, search_url, exit_log = 0, len(results), '', True @@ -668,6 +671,7 @@ class NewznabProvider(generic.NZBProvider): if self.cat_ids or len(cat): base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',') if self.cat_ids else []) + cat))) + base_params_uc['t'] = base_params['cat'] request_params = base_params.copy() if ('Propers' == mode or 'nzbs_org' == self.get_id()) \ @@ -687,7 +691,10 @@ class NewznabProvider(generic.NZBProvider): while (offset <= total) and (offset < max_items) and batch_count: cnt = len(results) - search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params)) + if 'Cache' == mode and uc_only: + search_url = '%srss?%s' % (self.url, urllib.urlencode(base_params_uc)) + else: + search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params)) i and time.sleep(2.1) data = helpers.getURL(search_url) @@ -734,7 +741,9 @@ class NewznabProvider(generic.NZBProvider): hits += int(0 == hits) offset = helpers.tryInt(parsed_xml.find('.//%sresponse' % n_spaces['newznab']).get('offset', 0)) except (AttributeError, KeyError): - break + if not uc_only: + break + total = len(items) # No items found, prevent from doing another search if 0 == total: @@ -747,7 +756,7 @@ class NewznabProvider(generic.NZBProvider): first_date = self._parse_pub_date(items[0]) last_date = self._parse_pub_date(items[-1]) if not first_date or 
not last_date or not self._last_recent_search or \ - last_date <= self.last_recent_search: + last_date <= self.last_recent_search or uc_only: break if offset != request_params['offset']: diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index ebcd45b2..e2bbbc0a 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -22,13 +22,13 @@ import time import traceback import urllib +import feedparser import sickbeard from . import generic from sickbeard import classes, logger, show_name_helpers, tvcache from sickbeard.bs4_parser import BS4Parser from sickbeard.exceptions import AuthException -from sickbeard.rssfeeds import RSSFeeds from sickbeard.common import neededQualities @@ -38,11 +38,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider): generic.NZBProvider.__init__(self, 'omgwtfnzbs') self.url = 'https://omgwtfnzbs.me/' - self.url_base = 'https://omgwtfnzbs.me/' self.url_api = 'https://api.omgwtfnzbs.me/' self.urls = {'config_provider_home_uri': self.url_base, - 'cache': 'https://rss.omgwtfnzbs.me/rss-download.php?%s', + 'cache': self.url_api + 'xml/?%s', 'search': self.url_api + 'json/?%s', 'cache_html': self.url_base + 'browse.php?cat=tv%s', 'search_html': self.url_base + 'browse.php?cat=tv&search=%s'} @@ -69,7 +68,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider): if 'notice' in data_json: description_text = data_json.get('notice') - if 'information is incorrect' in data_json.get('notice'): + if re.search('(?i)(information is incorrect|in(?:valid|correct).*?(?:username|api))', + data_json.get('notice')): logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + str(description_text), logger.DEBUG) raise AuthException( @@ -125,7 +125,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider): return result - def _get_cats(self, needed): + @staticmethod + def _get_cats(needed): cats = [] if needed.need_sd: cats.extend(OmgwtfnzbsProvider.cat_sd) @@ -140,21 +141,27 @@ class OmgwtfnzbsProvider(generic.NZBProvider): api_key = self._init_api() if False is api_key: return self.search_html(needed=needed, **kwargs) + results = [] cats = self._get_cats(needed=needed) if None is not api_key: - params = {'user': self.username, + params = {'search': '', + 'user': self.username, 'api': api_key, 'eng': 1, 'catid': ','.join(cats)} # SD,HD - rss_url = self.urls['cache'] % urllib.urlencode(params) + url = self.urls['cache'] % urllib.urlencode(params) - logger.log(self.name + u' cache update URL: ' + rss_url, logger.DEBUG) + response = self.get_url(url) - data = RSSFeeds(self).get_feed(rss_url) + data = feedparser.parse(response.replace('\n', '?>\n\n') + .replace('\n', '').replace('\n', '') + .replace('post>\n', 'entry>\n').replace('', '')) if data and 'entries' in data: - return data.entries - return [] + results = data.entries + + self._log_search('Cache', len(results), url) + return results def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0, needed=neededQualities(need_all=True), **kwargs): @@ -170,11 +177,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider): 'eng': 1, 'nukes': 1, 'catid': ','.join(cats), # SD,HD - 'retention': (sickbeard.USENET_RETENTION, retention)[retention or not sickbeard.USENET_RETENTION], + 'retention': retention or sickbeard.USENET_RETENTION or 0, 'search': search} search_url = self.urls['search'] % urllib.urlencode(params) - logger.log(u'Search url: ' + search_url, logger.DEBUG) data_json = self.get_url(search_url, json=True) if data_json and 
self._check_auth_from_data(data_json, is_xml=False): @@ -183,6 +189,13 @@ class OmgwtfnzbsProvider(generic.NZBProvider): if item.get('nuked', '').startswith('1'): continue results.append(item) + + mode = search_mode + if 'eponly' == search_mode: + mode = 'Episode' + elif 'sponly' == search_mode: + mode = 'Season' + self._log_search(mode, len(results), search_url) return results def search_html(self, search='', search_mode='', needed=neededQualities(need_all=True), **kwargs): diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py index aa6568f1..16c2a0a5 100644 --- a/sickbeard/show_name_helpers.py +++ b/sickbeard/show_name_helpers.py @@ -20,6 +20,7 @@ import os import re import datetime +from urllib import quote_plus import sickbeard from sickbeard import common @@ -125,11 +126,27 @@ def compile_word_list(lookup_words, re_prefix='(^|[\W_])', re_suffix='($|[\W_])' return result -def makeSceneShowSearchStrings(show, season=-1): - showNames = allPossibleShowNames(show, season=season) - # scenify the names - return map(sanitizeSceneName, showNames) +def url_encode(show_names, spacer='.'): + + return [quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in show_names] + + +def get_show_names(ep_obj, spacer='.'): + + old_anime, old_dirty = ep_obj.show.is_anime, ep_obj.show.dirty + ep_obj.show.anime = 1 # used to limit results from all_possible(...) + show_names = get_show_names_all_possible(ep_obj.show, season=ep_obj.season, spacer=spacer) + ep_obj.show.anime = old_anime # temporary measure, so restore property then dirty flag + ep_obj.show.dirty = old_dirty + return show_names + + +def get_show_names_all_possible(show, season=-1, scenify=True, spacer='.'): + show_names = set(allPossibleShowNames(show, season=season)) + if scenify: + show_names = map(sanitizeSceneName, show_names) + return url_encode(show_names, spacer) def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None): @@ -176,7 +193,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None): numseasons = int(numseasonsSQlResult[0][0]) seasonStrings = ["S%02d" % int(ep_obj.scene_season)] - showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season)) + showNames = get_show_names_all_possible(show, ep_obj.scene_season) toReturn = [] @@ -221,7 +238,7 @@ def makeSceneSearchString(show, ep_obj): if numseasons == 1 and not ep_obj.show.is_anime: epStrings = [''] - showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season)) + showNames = get_show_names_all_possible(show, ep_obj.scene_season) toReturn = [] diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py index 5444c91a..b7c5ffae 100644 --- a/sickbeard/webapi.py +++ b/sickbeard/webapi.py @@ -77,6 +77,8 @@ class Api(webserve.BaseHandler): def set_default_headers(self): self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet') + if sickbeard.SEND_SECURITY_HEADERS: + self.set_header('X-Frame-Options', 'SAMEORIGIN') def get(self, route, *args, **kwargs): route = route.strip('/') or 'index' diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index cb7babbb..0384ea6c 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -123,12 +123,16 @@ class PageTemplate(Template): class BaseStaticFileHandler(StaticFileHandler): def set_extra_headers(self, path): self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, 
nosnippet') + if sickbeard.SEND_SECURITY_HEADERS: + self.set_header('X-Frame-Options', 'SAMEORIGIN') class BaseHandler(RequestHandler): def set_default_headers(self): self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet') + if sickbeard.SEND_SECURITY_HEADERS: + self.set_header('X-Frame-Options', 'SAMEORIGIN') def redirect(self, url, permanent=False, status=None): if not url.startswith(sickbeard.WEB_ROOT):
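
Taken together, the webapi.py and webserve.py hunks send one extra response header from every handler when the new toggle is enabled. A minimal self-contained Tornado sketch of the same pattern, with a module-level flag standing in for sickbeard.SEND_SECURITY_HEADERS:

from tornado.web import RequestHandler

SEND_SECURITY_HEADERS = True  # stands in for sickbeard.SEND_SECURITY_HEADERS (default enabled)

class BaseHandler(RequestHandler):
    def set_default_headers(self):
        # headers that were already sent before this change
        self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
        self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, '
                                        'noodp, noydir, noimageindex, nosnippet')
        # new: clickjacking protection, gated on the config/general toggle
        if SEND_SECURITY_HEADERS:
            self.set_header('X-Frame-Options', 'SAMEORIGIN')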