Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-05 17:43:37 +00:00)
Add config/general/web interface/send security headers (default enabled).
Change use bare minimum requests for all usenet_crawler search modes.
Change use RSS endpoint for usenet_crawler in cache mode.
Fix ensure remaining unicode shownames are correctly 'UTF-8' and url encoded.
Fix omgwtf test of invalid auth, issue when enabling propers, and updating cache.
Change refactor shownames list into reusable show_name_helper methods.
Add season specific naming exceptions to nzb + btn.
This commit is contained in:
parent 11dfd66efd
commit 4dea2ad022

11 changed files with 108 additions and 70 deletions

CHANGES.md (11 changes)
@@ -1,4 +1,13 @@
+### 0.13.14 (2018-01-25 16:20:00 UTC)
+
+* Add config/general/web interface/send security headers (default enabled)
+* Fix usenet_crawler cache mode results
+* Fix omgwtf test of invalid auth, issue when enabling propers, and updating cache
+* Fix unicode shownames when searching
+* Add season specific naming exceptions to nzb + btn
+
+
 ### 0.13.13 (2018-01-19 00:45:00 UTC)
 
 * Fix setting episode status when testing for if it should be deleted
 * Restrict setting newly added old episodes to WANTED to the last 90 days, older are set to SKIPPED
@@ -567,6 +567,16 @@
                 </label>
             </div>
 
+            <div class="field-pair">
+                <label for="send_security_headers">
+                    <span class="component-title">Send security headers</span>
+                    <span class="component-desc">
+                        <input type="checkbox" name="send_security_headers" id="send_security_headers"#echo ('', $checked)[$sg_var('SEND_SECURITY_HEADERS')]#>
+                        <p>send the following headers to increase browser security...<br />(X-Frame-Options:SAMEORIGIN)</p>
+                    </span>
+                </label>
+            </div>
+
             <input type="submit" class="btn config_submitter" value="Save Changes">
 
         </fieldset>
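The `#echo ('', $checked)[$sg_var('SEND_SECURITY_HEADERS')]#` expression in the checkbox line uses Cheetah's tuple-indexing-by-bool idiom to emit the checked attribute only when the setting is enabled. A minimal Python sketch of the idiom (the `$checked` expansion shown is an assumption):

    # tuple indexed by a bool: False -> 0 -> '', True -> 1 -> checked attribute
    checked = ' checked="checked"'  # assumed value of $checked in these templates
    send_security_headers = True    # stands in for $sg_var('SEND_SECURITY_HEADERS')
    attr = ('', checked)[bool(send_security_headers)]
    print('<input type="checkbox" name="send_security_headers"%s>' % attr)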
@@ -130,6 +130,7 @@ WEB_IPV6 = None
 WEB_IPV64 = None
 
 HANDLE_REVERSE_PROXY = False
+SEND_SECURITY_HEADERS = True
 PROXY_SETTING = None
 PROXY_INDEXERS = True
 
@@ -587,7 +588,7 @@ def initialize(console_logging=True):
         HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \
         DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, TIMEZONE_DISPLAY, \
         WEB_USERNAME, WEB_PASSWORD, CALENDAR_UNPROTECTED, USE_API, API_KEY, WEB_PORT, WEB_LOG, \
-        ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY
+        ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY, SEND_SECURITY_HEADERS
 
     # Gen Config/Advanced
     global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \
         ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET
@@ -788,6 +789,7 @@ def initialize(console_logging=True):
         HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', 'server.key')
 
         HANDLE_REVERSE_PROXY = bool(check_setting_int(CFG, 'General', 'handle_reverse_proxy', 0))
+        SEND_SECURITY_HEADERS = bool(check_setting_int(CFG, 'General', 'send_security_headers', 1))
 
         ROOT_DIRS = check_setting_str(CFG, 'General', 'root_dirs', '')
         if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS):
@@ -1563,6 +1565,7 @@ def save_config():
     new_config['General']['https_cert'] = HTTPS_CERT
     new_config['General']['https_key'] = HTTPS_KEY
     new_config['General']['handle_reverse_proxy'] = int(HANDLE_REVERSE_PROXY)
+    new_config['General']['send_security_headers'] = int(SEND_SECURITY_HEADERS)
     new_config['General']['use_nzbs'] = int(USE_NZBS)
     new_config['General']['use_torrents'] = int(USE_TORRENTS)
     new_config['General']['nzb_method'] = NZB_METHOD
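Load and save stay symmetric for this flag: it is read with `bool(check_setting_int(...))` during initialize() and written back as `int(...)` in save_config(), so the config file stores 0/1. A hedged sketch of that round trip (dict-backed config and a minimal stand-in for the real `check_setting_int` helper):

    cfg = {'General': {'send_security_headers': '1'}}  # as parsed from config.ini

    def check_setting_int(config, section, key, default):
        # minimal stand-in for the real helper: fall back to default on any miss
        try:
            return int(config[section][key])
        except (KeyError, TypeError, ValueError):
            return default

    SEND_SECURITY_HEADERS = bool(check_setting_int(cfg, 'General', 'send_security_headers', 1))
    cfg['General']['send_security_headers'] = int(SEND_SECURITY_HEADERS)  # saved as 0/1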
@@ -37,7 +37,6 @@ from sickbeard.metadata import helpers as metadata_helpers
 from sickbeard import logger
 from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex
-from sickbeard.show_name_helpers import allPossibleShowNames
 from sickbeard.indexers import indexer_config
 from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_TVDB_V1
 
@@ -24,6 +24,7 @@ from sickbeard import helpers, logger, scene_exceptions, tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.exceptions import AuthException
 from sickbeard.helpers import tryInt
+from sickbeard.show_name_helpers import get_show_names
 from lib.unidecode import unidecode
 
 try:
@@ -293,20 +294,12 @@ class BTNProvider(generic.TorrentProvider):
             base_params['tvdb'] = ep_obj.show.indexerid
             base_params['series'] = ep_obj.show.name
             search_params.append(base_params)
         # elif 2 == ep_obj.show.indexer:
         #     current_params['tvrage'] = ep_obj.show.indexerid
         #     search_params.append(current_params)
         # else:
-        name_exceptions = list(
-            set([helpers.sanitizeSceneName(a) for a in
-                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
+        dedupe = [ep_obj.show.name.replace(' ', '.')]
+
+        name_exceptions = get_show_names(ep_obj)
         for name in name_exceptions:
+            if name.replace(' ', '.') not in dedupe:
+                dedupe += [name.replace(' ', '.')]
-            series_param = base_params.copy()
-            series_param['series'] = name
-            search_params.append(series_param)
+                series_param = base_params.copy()
+                series_param['series'] = name
+                search_params.append(series_param)
 
         return [dict(Season=search_params)]
@@ -318,7 +311,6 @@ class BTNProvider(generic.TorrentProvider):
         search_params = []
         base_params = {'category': 'Episode'}
 
-        # episode
         if ep_obj.show.air_by_date or ep_obj.show.is_sports:
             date_str = str(ep_obj.airdate)
 
@@ -333,27 +325,16 @@ class BTNProvider(generic.TorrentProvider):
                           (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
             base_params['name'] = 'S%02dE%02d' % (season, episode)
 
         # search
         if 1 == ep_obj.show.indexer:
             base_params['tvdb'] = ep_obj.show.indexerid
             base_params['series'] = ep_obj.show.name
             search_params.append(base_params)
         # elif 2 == ep_obj.show.indexer:
         #     search_params['tvrage'] = ep_obj.show.indexerid
         #     to_return.append(search_params)
 
         # else:
         # add new query string for every exception
-        name_exceptions = list(
-            set([helpers.sanitizeSceneName(a) for a in
-                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
+        dedupe = [ep_obj.show.name.replace(' ', '.')]
+        name_exceptions = get_show_names(ep_obj)
         for name in name_exceptions:
+            if name.replace(' ', '.') not in dedupe:
+                dedupe += [name.replace(' ', '.')]
-            series_param = base_params.copy()
-            series_param['series'] = name
-            search_params.append(series_param)
+                series_param = base_params.copy()
+                series_param['series'] = name
+                search_params.append(series_param)
 
         return [dict(Episode=search_params)]
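Both search-string builders above dedupe on dot-joined names, so exceptions that differ only by spacing collapse into one query. A small sketch with made-up names:

    dedupe = ['Show.Name']  # seeded from ep_obj.show.name.replace(' ', '.')
    for name in ['Show Name', 'Show-Name', 'Show.Name']:
        if name.replace(' ', '.') not in dedupe:
            dedupe += [name.replace(' ', '.')]
    # dedupe -> ['Show.Name', 'Show-Name']; only 'Show-Name' adds a search param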
@@ -27,7 +27,6 @@ import re
 import time
 import urlparse
 import threading
 import urllib
 from urllib import quote_plus
 import zlib
 from base64 import b16encode, b32decode
@@ -40,12 +39,12 @@ from hachoir_parser import guessParser
 from hachoir_core.error import HachoirError
 from hachoir_core.stream import FileInputStream
 
-from sickbeard import helpers, classes, logger, db, tvcache, scene_exceptions, encodingKludge as ek
+from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
 from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
 from sickbeard.exceptions import SickBeardException, AuthException, ex
 from sickbeard.helpers import maybe_plural, remove_file_failed
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-from sickbeard.show_name_helpers import allPossibleShowNames
+from sickbeard.show_name_helpers import get_show_names_all_possible
 
 
 class HaltParseException(SickBeardException):
@@ -166,12 +165,6 @@ class GenericProvider:
         return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
                               session=self.session, json=json, hooks=dict(response=self.cb_response))
 
-    @staticmethod
-    def get_show_names_url_encoded(ep_obj, spacer='.'):
-        return [urllib.quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in list(
-            set([helpers.sanitizeSceneName(a) for a in
-                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))]
-
     def download_result(self, result):
         """
         Save the result to disk.
@@ -962,9 +955,7 @@ class TorrentProvider(object, GenericProvider):
 
         search_params = []
         crop = re.compile(r'([.\s])(?:\1)+')
-        for name in set(allPossibleShowNames(self.show)):
-            if process_name and getattr(self, 'scene', True):
-                name = helpers.sanitizeSceneName(name)
+        for name in get_show_names_all_possible(self.show, scenify=process_name and getattr(self, 'scene', True)):
             for detail in ep_detail:
                 search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix]
         return search_params
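The `crop` pattern collapses any run of a repeated dot or whitespace character to a single occurrence, tidying the strings assembled from name, prefix, and detail. For example:

    import re

    crop = re.compile(r'([.\s])(?:\1)+')  # a '.' or whitespace char followed by repeats of itself
    print(crop.sub(r'\1', 'Show..Name  S01E01'))  # -> 'Show.Name S01E01'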
@@ -35,6 +35,7 @@ from io import BytesIO
 from lib.dateutil import parser
 from sickbeard.network_timezones import sb_timezone
 from sickbeard.helpers import tryInt
+from sickbeard.show_name_helpers import get_show_names
 from sickbeard.search import get_wanted_qualities, get_aired_in_season
 
 try:
@@ -353,8 +354,7 @@ class NewznabProvider(generic.NZBProvider):
 
         spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
         # query search and exceptions
-        name_exceptions = self.get_show_names_url_encoded(ep_obj, spacer)
-
+        name_exceptions = get_show_names(ep_obj, spacer)
         for cur_exception in name_exceptions:
             params = base_params.copy()
             if 'q' in params:
@@ -407,8 +407,7 @@ class NewznabProvider(generic.NZBProvider):
 
         spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
         # query search and exceptions
-        name_exceptions = self.get_show_names_url_encoded(ep_obj, spacer)
-
+        name_exceptions = get_show_names(ep_obj, spacer)
         if sickbeard.scene_exceptions.has_abs_episodes(ep_obj):
             search_params.append({'q': '%s%s%s' % (ep_obj.show.name, spacer, base_params['ep'])})
         for cur_exception in name_exceptions:
@@ -627,8 +626,12 @@ class NewznabProvider(generic.NZBProvider):
                                               if v in self.caps]),
                        'offset': 0}
 
+        uc_only = all([re.search('(?i)usenet_crawler', self.get_id())])
+        base_params_uc = {'num': self.limits, 'dl': '1', 'i': '64660'}
+
         if isinstance(api_key, basestring) and api_key not in ('0', ''):
             base_params['apikey'] = api_key
+            base_params_uc['r'] = api_key
 
         results, n_spaces = [], {}
         total, cnt, search_url, exit_log = 0, len(results), '', True
@@ -668,6 +671,7 @@ class NewznabProvider(generic.NZBProvider):
 
         if self.cat_ids or len(cat):
             base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',') if self.cat_ids else []) + cat)))
+            base_params_uc['t'] = base_params['cat']
 
         request_params = base_params.copy()
         if ('Propers' == mode or 'nzbs_org' == self.get_id()) \
@@ -687,7 +691,10 @@ class NewznabProvider(generic.NZBProvider):
         while (offset <= total) and (offset < max_items) and batch_count:
             cnt = len(results)
 
-            search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
+            if 'Cache' == mode and uc_only:
+                search_url = '%srss?%s' % (self.url, urllib.urlencode(base_params_uc))
+            else:
+                search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
             i and time.sleep(2.1)
 
             data = helpers.getURL(search_url)
@@ -734,7 +741,9 @@ class NewznabProvider(generic.NZBProvider):
                 hits += int(0 == hits)
                 offset = helpers.tryInt(parsed_xml.find('.//%sresponse' % n_spaces['newznab']).get('offset', 0))
             except (AttributeError, KeyError):
-                break
+                if not uc_only:
+                    break
+                total = len(items)
 
             # No items found, prevent from doing another search
             if 0 == total:
@@ -747,7 +756,7 @@ class NewznabProvider(generic.NZBProvider):
             first_date = self._parse_pub_date(items[0])
             last_date = self._parse_pub_date(items[-1])
             if not first_date or not last_date or not self._last_recent_search or \
-                    last_date <= self.last_recent_search:
+                    last_date <= self.last_recent_search or uc_only:
                 break
 
         if offset != request_params['offset']:
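Note that `uc_only` is computed with `all(...)` over a single-element list, which reduces to a plain truthiness test; an equivalent spelling (provider id value assumed):

    import re

    provider_id = 'usenet_crawler'  # assumed return value of self.get_id()
    uc_only = bool(re.search('(?i)usenet_crawler', provider_id))  # same as all([re.search(...)])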
@@ -22,13 +22,13 @@ import time
 import traceback
 import urllib
 
+import feedparser
 import sickbeard
 
 from . import generic
 from sickbeard import classes, logger, show_name_helpers, tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.exceptions import AuthException
-from sickbeard.rssfeeds import RSSFeeds
 from sickbeard.common import neededQualities
 
 
@@ -38,11 +38,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         generic.NZBProvider.__init__(self, 'omgwtfnzbs')
 
-        self.url = 'https://omgwtfnzbs.me/'
-
         self.url_base = 'https://omgwtfnzbs.me/'
         self.url_api = 'https://api.omgwtfnzbs.me/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'cache': 'https://rss.omgwtfnzbs.me/rss-download.php?%s',
+                     'cache': self.url_api + 'xml/?%s',
                      'search': self.url_api + 'json/?%s',
                      'cache_html': self.url_base + 'browse.php?cat=tv%s',
                      'search_html': self.url_base + 'browse.php?cat=tv&search=%s'}
@@ -69,7 +68,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             if 'notice' in data_json:
                 description_text = data_json.get('notice')
 
-                if 'information is incorrect' in data_json.get('notice'):
+                if re.search('(?i)(information is incorrect|in(?:valid|correct).*?(?:username|api))',
+                             data_json.get('notice')):
                     logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + str(description_text),
                                logger.DEBUG)
                     raise AuthException(
@@ -125,7 +125,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 
         return result
 
-    def _get_cats(self, needed):
+    @staticmethod
+    def _get_cats(needed):
         cats = []
         if needed.need_sd:
             cats.extend(OmgwtfnzbsProvider.cat_sd)
@@ -140,21 +141,27 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         api_key = self._init_api()
         if False is api_key:
             return self.search_html(needed=needed, **kwargs)
+        results = []
         cats = self._get_cats(needed=needed)
         if None is not api_key:
-            params = {'user': self.username,
+            params = {'search': '',
+                      'user': self.username,
                       'api': api_key,
                       'eng': 1,
                       'catid': ','.join(cats)}  # SD,HD
 
-            rss_url = self.urls['cache'] % urllib.urlencode(params)
+            url = self.urls['cache'] % urllib.urlencode(params)
 
-            logger.log(self.name + u' cache update URL: ' + rss_url, logger.DEBUG)
+            response = self.get_url(url)
 
-            data = RSSFeeds(self).get_feed(rss_url)
+            data = feedparser.parse(response.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>')
+                                    .replace('<search_req>\n', '').replace('</search_req>\n', '')
+                                    .replace('post>\n', 'entry>\n').replace('</xml>', '</feed>'))
             if data and 'entries' in data:
-                return data.entries
-            return []
+                results = data.entries
+
+        self._log_search('Cache', len(results), url)
+        return results
 
     def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0,
                          needed=neededQualities(need_all=True), **kwargs):
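The replace chain in the cache method rewrites the provider's bare `<xml>`/`<post>` payload into a feed-shaped document before handing it to feedparser, which is deliberately liberal about input. A self-contained illustration with sample data shaped like the massaged output:

    import feedparser

    data = feedparser.parse(
        '<?xml version="1.0"?>\n'
        '<feed>\n<entry><title>Some.Show.S01E01</title></entry>\n</feed>')
    if data and 'entries' in data:
        print(len(data.entries), data.entries[0].title)  # -> 1 Some.Show.S01E01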
@@ -170,11 +177,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                       'eng': 1,
                       'nukes': 1,
                       'catid': ','.join(cats),  # SD,HD
-                      'retention': (sickbeard.USENET_RETENTION, retention)[retention or not sickbeard.USENET_RETENTION],
+                      'retention': retention or sickbeard.USENET_RETENTION or 0,
                       'search': search}
 
             search_url = self.urls['search'] % urllib.urlencode(params)
-            logger.log(u'Search url: ' + search_url, logger.DEBUG)
 
             data_json = self.get_url(search_url, json=True)
             if data_json and self._check_auth_from_data(data_json, is_xml=False):
@@ -183,6 +189,13 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                     if item.get('nuked', '').startswith('1'):
                         continue
                     results.append(item)
+
+            mode = search_mode
+            if 'eponly' == search_mode:
+                mode = 'Episode'
+            elif 'sponly' == search_mode:
+                mode = 'Season'
+            self._log_search(mode, len(results), search_url)
         return results
 
     def search_html(self, search='', search_mode='', needed=neededQualities(need_all=True), **kwargs):
@@ -20,6 +20,7 @@ import os
 
 import re
 import datetime
+from urllib import quote_plus
 
 import sickbeard
 from sickbeard import common
@@ -125,11 +126,27 @@ def compile_word_list(lookup_words, re_prefix='(^|[\W_])', re_suffix='($|[\W_])'
 
     return result
 
-def makeSceneShowSearchStrings(show, season=-1):
-    showNames = allPossibleShowNames(show, season=season)
-
-    # scenify the names
-    return map(sanitizeSceneName, showNames)
+def url_encode(show_names, spacer='.'):
+
+    return [quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in show_names]
+
+
+def get_show_names(ep_obj, spacer='.'):
+
+    old_anime, old_dirty = ep_obj.show.is_anime, ep_obj.show.dirty
+    ep_obj.show.anime = 1  # used to limit results from all_possible(...)
+    show_names = get_show_names_all_possible(ep_obj.show, season=ep_obj.season, spacer=spacer)
+    ep_obj.show.anime = old_anime  # temporary measure, so restore property then dirty flag
+    ep_obj.show.dirty = old_dirty
+    return show_names
+
+
+def get_show_names_all_possible(show, season=-1, scenify=True, spacer='.'):
+    show_names = set(allPossibleShowNames(show, season=season))
+    if scenify:
+        show_names = map(sanitizeSceneName, show_names)
+    return url_encode(show_names, spacer)
 
 
 def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
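The new `url_encode` helper is where the commit's unicode fix lands: every name is UTF-8 encoded before percent-encoding, so non-ASCII shownames survive the trip into provider URLs. A quick demonstration (example names assumed; Python 2, matching this codebase):

    # -*- coding: utf-8 -*-
    from urllib import quote_plus

    def url_encode(show_names, spacer='.'):
        return [quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in show_names]

    print(url_encode([u'Caf\xe9 Show']))    # -> ['Caf%C3%A9+Show']
    print(url_encode([u'Mr. Robot'], ' '))  # -> ['Mr++Robot'] (dots become the spacer)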
@@ -176,7 +193,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
         numseasons = int(numseasonsSQlResult[0][0])
         seasonStrings = ["S%02d" % int(ep_obj.scene_season)]
 
-    showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season))
+    showNames = get_show_names_all_possible(show, ep_obj.scene_season)
 
     toReturn = []
 
@@ -221,7 +238,7 @@ def makeSceneSearchString(show, ep_obj):
     if numseasons == 1 and not ep_obj.show.is_anime:
         epStrings = ['']
 
-    showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season))
+    showNames = get_show_names_all_possible(show, ep_obj.scene_season)
 
     toReturn = []
 
@@ -77,6 +77,8 @@ class Api(webserve.BaseHandler):
     def set_default_headers(self):
         self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
         self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+        if sickbeard.SEND_SECURITY_HEADERS:
+            self.set_header('X-Frame-Options', 'SAMEORIGIN')
 
     def get(self, route, *args, **kwargs):
         route = route.strip('/') or 'index'
@@ -123,12 +123,16 @@ class PageTemplate(Template):
 class BaseStaticFileHandler(StaticFileHandler):
     def set_extra_headers(self, path):
         self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+        if sickbeard.SEND_SECURITY_HEADERS:
+            self.set_header('X-Frame-Options', 'SAMEORIGIN')
 
 
 class BaseHandler(RequestHandler):
     def set_default_headers(self):
         self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
         self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+        if sickbeard.SEND_SECURITY_HEADERS:
+            self.set_header('X-Frame-Options', 'SAMEORIGIN')
 
     def redirect(self, url, permanent=False, status=None):
         if not url.startswith(sickbeard.WEB_ROOT):
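Both handlers gate the frame guard on the new setting instead of sending it unconditionally, presumably so installs that embed the UI in a frame can turn it off. A minimal standalone sketch of the pattern (module-level flag stands in for `sickbeard.SEND_SECURITY_HEADERS`):

    import tornado.web

    SEND_SECURITY_HEADERS = True  # stand-in for sickbeard.SEND_SECURITY_HEADERS

    class BaseHandler(tornado.web.RequestHandler):
        def set_default_headers(self):
            # unconditional headers (cache control, robots) would be set here
            if SEND_SECURITY_HEADERS:
                self.set_header('X-Frame-Options', 'SAMEORIGIN')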