Merge branch 'hotfix/0.13.14'

JackDandy 2018-01-25 19:50:32 +00:00
commit 2af6c24166
11 changed files with 111 additions and 72 deletions

View file

@@ -1,4 +1,13 @@
-### 0.13.13 (2018-01-19 00:45:00 UTC)
+### 0.13.14 (2018-01-25 16:20:00 UTC)
+
+* Add config/general/web interface/send security headers (default enabled)
+* Fix usenet_crawler cache mode results
+* Fix omgwtf test of invalid auth, issue when enabling propers, and updating cache
+* Fix unicode shownames when searching
+* Add season specific naming exceptions to nzb + btn
+
+
+### 0.13.13 (2018-01-19 00:45:00 UTC)
 * Fix setting episode status when testing for if it should be deleted
 * Restrict setting newly added old episodes to WANTED to the last 90 days, older are set to SKIPPED

View file

@@ -567,6 +567,16 @@
 					</label>
 				</div>
+				<div class="field-pair">
+					<label for="send_security_headers">
+						<span class="component-title">Send security headers</span>
+						<span class="component-desc">
+							<input type="checkbox" name="send_security_headers" id="send_security_headers"#echo ('', $checked)[$sg_var('SEND_SECURITY_HEADERS')]#>
+							<p>send the following headers to increase browser security...<br />(X-Frame-Options:SAMEORIGIN)</p>
+						</span>
+					</label>
+				</div>
+
 				<input type="submit" class="btn config_submitter" value="Save Changes">
 			</fieldset>
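
The `#echo ('', $checked)[$sg_var('SEND_SECURITY_HEADERS')]#` expression is Cheetah's spelling of a common idiom: index a two-tuple with a boolean so the `checked` attribute is emitted only when the setting is on. A plain-Python sketch of the same idiom (the attribute value is illustrative):

```python
# The tuple-index idiom behind the template's #echo ('', $checked)[...]#:
# a bool used as an index selects '' (False -> 0) or the attribute (True -> 1).
checked = ' checked="checked"'

for send_security_headers in (True, False):
    attr = ('', checked)[bool(send_security_headers)]
    print('<input type="checkbox"%s>' % attr)
# <input type="checkbox" checked="checked">
# <input type="checkbox">
```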

View file

@@ -130,6 +130,7 @@ WEB_IPV6 = None
 WEB_IPV64 = None
 HANDLE_REVERSE_PROXY = False
+SEND_SECURITY_HEADERS = True

 PROXY_SETTING = None
 PROXY_INDEXERS = True
@@ -587,7 +588,7 @@ def initialize(console_logging=True):
         HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \
         DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, TIMEZONE_DISPLAY, \
         WEB_USERNAME, WEB_PASSWORD, CALENDAR_UNPROTECTED, USE_API, API_KEY, WEB_PORT, WEB_LOG, \
-        ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY
+        ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY, SEND_SECURITY_HEADERS

     # Gen Config/Advanced
     global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \
         ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET
@@ -788,6 +789,7 @@ def initialize(console_logging=True):
         HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', 'server.key')

         HANDLE_REVERSE_PROXY = bool(check_setting_int(CFG, 'General', 'handle_reverse_proxy', 0))
+        SEND_SECURITY_HEADERS = bool(check_setting_int(CFG, 'General', 'send_security_headers', 1))

         ROOT_DIRS = check_setting_str(CFG, 'General', 'root_dirs', '')
         if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS):
@@ -1563,6 +1565,7 @@ def save_config():
     new_config['General']['https_cert'] = HTTPS_CERT
     new_config['General']['https_key'] = HTTPS_KEY
     new_config['General']['handle_reverse_proxy'] = int(HANDLE_REVERSE_PROXY)
+    new_config['General']['send_security_headers'] = int(SEND_SECURITY_HEADERS)
     new_config['General']['use_nzbs'] = int(USE_NZBS)
     new_config['General']['use_torrents'] = int(USE_TORRENTS)
     new_config['General']['nzb_method'] = NZB_METHOD
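
The new setting follows the round-trip pattern used for the other booleans in this file: read via `check_setting_int` with a default of 1 (enabled) and written back as an int. A minimal standalone sketch of that pattern, assuming a ConfigObj-style nested dict (`check_setting_int` below is a simplified stand-in, not SickGear's helper):

```python
# Sketch of the int-backed boolean setting round-trip, under the assumption
# that CFG behaves like a ConfigObj: a dict of sections mapping to dicts.

def check_setting_int(config, section, key, default):
    """Read an int setting, falling back to the default if absent or invalid."""
    try:
        return int(config[section][key])
    except (KeyError, TypeError, ValueError):
        return default

CFG = {'General': {}}

# load: a missing key falls back to the default of 1, i.e. enabled
SEND_SECURITY_HEADERS = bool(check_setting_int(CFG, 'General', 'send_security_headers', 1))

# save: booleans are persisted as 0/1 so the config file stays uniform
CFG['General']['send_security_headers'] = int(SEND_SECURITY_HEADERS)
print(CFG)  # {'General': {'send_security_headers': 1}}
```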

View file

@@ -37,7 +37,6 @@ from sickbeard.metadata import helpers as metadata_helpers
 from sickbeard import logger
 from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex
-from sickbeard.show_name_helpers import allPossibleShowNames
 from sickbeard.indexers import indexer_config
 from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_TVDB_V1

View file

@@ -24,6 +24,7 @@ from sickbeard import helpers, logger, scene_exceptions, tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.exceptions import AuthException
 from sickbeard.helpers import tryInt
+from sickbeard.show_name_helpers import get_show_names
 from lib.unidecode import unidecode

 try:
@@ -293,20 +294,12 @@ class BTNProvider(generic.TorrentProvider):
             base_params['tvdb'] = ep_obj.show.indexerid
             base_params['series'] = ep_obj.show.name
             search_params.append(base_params)
-        # elif 2 == ep_obj.show.indexer:
-        #     current_params['tvrage'] = ep_obj.show.indexerid
-        #     search_params.append(current_params)
-        # else:
-        name_exceptions = list(
-            set([helpers.sanitizeSceneName(a) for a in
-                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
-        dedupe = [ep_obj.show.name.replace(' ', '.')]
+
+        name_exceptions = get_show_names(ep_obj)
         for name in name_exceptions:
-            if name.replace(' ', '.') not in dedupe:
-                dedupe += [name.replace(' ', '.')]
-                series_param = base_params.copy()
-                series_param['series'] = name
-                search_params.append(series_param)
+            series_param = base_params.copy()
+            series_param['series'] = name
+            search_params.append(series_param)

         return [dict(Season=search_params)]
@@ -318,7 +311,6 @@ class BTNProvider(generic.TorrentProvider):
         search_params = []
         base_params = {'category': 'Episode'}

-        # episode
         if ep_obj.show.air_by_date or ep_obj.show.is_sports:
             date_str = str(ep_obj.airdate)
@@ -333,27 +325,16 @@ class BTNProvider(generic.TorrentProvider):
                               (ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
             base_params['name'] = 'S%02dE%02d' % (season, episode)

-        # search
         if 1 == ep_obj.show.indexer:
             base_params['tvdb'] = ep_obj.show.indexerid
             base_params['series'] = ep_obj.show.name
             search_params.append(base_params)
-        # elif 2 == ep_obj.show.indexer:
-        #     search_params['tvrage'] = ep_obj.show.indexerid
-        #     to_return.append(search_params)
-        # else:
-        # add new query string for every exception
-        name_exceptions = list(
-            set([helpers.sanitizeSceneName(a) for a in
-                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
-        dedupe = [ep_obj.show.name.replace(' ', '.')]
+
+        name_exceptions = get_show_names(ep_obj)
         for name in name_exceptions:
-            if name.replace(' ', '.') not in dedupe:
-                dedupe += [name.replace(' ', '.')]
-                series_param = base_params.copy()
-                series_param['series'] = name
-                search_params.append(series_param)
+            series_param = base_params.copy()
+            series_param['series'] = name
+            search_params.append(series_param)

         return [dict(Episode=search_params)]
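
Both the season and episode builders now delegate exception handling to `get_show_names` and simply fan out one query per returned name. A standalone sketch of that fan-out with stand-in data (`get_show_names` below only illustrates the dedupe-and-dot behavior, it is not the real helper):

```python
# Fan-out of one search query per known show name, as in both builders above.

def get_show_names(show_name, exceptions):
    """Stand-in: dedupe names with spaces collapsed to dots."""
    seen, names = set(), []
    for name in [show_name] + exceptions:
        key = name.replace(' ', '.')
        if key not in seen:
            seen.add(key)
            names.append(key)
    return names

base_params = {'category': 'Season', 'name': 'Season 3', 'tvdb': 12345}
search_params = [dict(base_params)]

for name in get_show_names('Show Name', ['Show Name (US)', 'Show Name']):
    series_param = base_params.copy()  # one query per distinct name
    series_param['series'] = name
    search_params.append(series_param)

print(len(search_params))  # 3: the base query plus two distinct names
```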

View file

@@ -44,7 +44,7 @@ from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
 from sickbeard.exceptions import SickBeardException, AuthException, ex
 from sickbeard.helpers import maybe_plural, remove_file_failed
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-from sickbeard.show_name_helpers import allPossibleShowNames
+from sickbeard.show_name_helpers import get_show_names_all_possible


 class HaltParseException(SickBeardException):
@@ -955,9 +955,7 @@ class TorrentProvider(object, GenericProvider):
         search_params = []
         crop = re.compile(r'([.\s])(?:\1)+')
-        for name in set(allPossibleShowNames(self.show)):
-            if process_name and getattr(self, 'scene', True):
-                name = helpers.sanitizeSceneName(name)
+        for name in get_show_names_all_possible(self.show, scenify=process_name and getattr(self, 'scene', True)):
             for detail in ep_detail:
                 search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix]
         return search_params
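
The `crop` pattern is worth a note: `([.\s])(?:\1)+` matches a separator character followed by repeats of that same character, and substituting the captured group collapses the run to a single separator. For example:

```python
import re

# A separator ('.' or whitespace) is captured in group 1; (?:\1)+ requires one
# or more repeats of that exact character, and sub collapses the run to one.
crop = re.compile(r'([.\s])(?:\1)+')

print(crop.sub(r'\1', 'Show.Name..S01E01'))  # Show.Name.S01E01
print(crop.sub(r'\1', 'Show Name  S01E01'))  # Show Name S01E01
print(crop.sub(r'\1', 'Show.Name. S01E01'))  # unchanged: '.' and ' ' differ
```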

View file

@@ -27,7 +27,7 @@ from math import ceil
 from sickbeard.sbdatetime import sbdatetime
 from . import generic
-from sickbeard import helpers, logger, scene_exceptions, tvcache, classes, db
+from sickbeard import helpers, logger, tvcache, classes, db
 from sickbeard.common import neededQualities, Quality
 from sickbeard.exceptions import AuthException, MultipleShowObjectsException
 from sickbeard.indexers.indexer_config import *
@@ -35,6 +35,7 @@ from io import BytesIO
 from lib.dateutil import parser
 from sickbeard.network_timezones import sb_timezone
 from sickbeard.helpers import tryInt
+from sickbeard.show_name_helpers import get_show_names
 from sickbeard.search import get_wanted_qualities, get_aired_in_season

 try:
@@ -351,15 +352,11 @@ class NewznabProvider(generic.NZBProvider):
                 use_id = True
             use_id and search_params.append(params)

+        spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
         # query search and exceptions
-        name_exceptions = list(
-            set([helpers.sanitizeSceneName(a) for a in
-                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
-        spacer = 'geek' in self.get_id() and ' ' or '.'
+        name_exceptions = get_show_names(ep_obj, spacer)
         for cur_exception in name_exceptions:
             params = base_params.copy()
-            cur_exception = cur_exception.replace('.', spacer)
             if 'q' in params:
                 params['q'] = '%s%s%s' % (cur_exception, spacer, params['q'])
             search_params.append(params)
@@ -408,17 +405,13 @@ class NewznabProvider(generic.NZBProvider):
                 use_id = True
             use_id and search_params.append(params)

+        spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
         # query search and exceptions
-        name_exceptions = list(
-            set([helpers.sanitizeSceneName(a) for a in
-                 scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
-        spacer = 'geek' in self.get_id() and ' ' or '.'
+        name_exceptions = get_show_names(ep_obj, spacer)
         if sickbeard.scene_exceptions.has_abs_episodes(ep_obj):
             search_params.append({'q': '%s%s%s' % (ep_obj.show.name, spacer, base_params['ep'])})
         for cur_exception in name_exceptions:
             params = base_params.copy()
-            cur_exception = cur_exception.replace('.', spacer)
             params['q'] = cur_exception
             search_params.append(params)
@@ -444,7 +437,7 @@ class NewznabProvider(generic.NZBProvider):
             r_found = True
             while r_found:
                 r_found = False
-                for pattern, repl in ((r'(?i)-Obfuscated$', ''), (r'(?i)-postbot$', '')):
+                for pattern, repl in ((r'(?i)-Obfuscated$', ''), (r'(?i)-postbot$', ''), (r'(?i)[-.]English$', '')):
                     if re.search(pattern, title):
                         r_found = True
                         title = re.sub(pattern, repl, title)
@@ -633,8 +626,12 @@ class NewznabProvider(generic.NZBProvider):
                                         if v in self.caps]),
                        'offset': 0}

+        uc_only = all([re.search('(?i)usenet_crawler', self.get_id())])
+        base_params_uc = {'num': self.limits, 'dl': '1', 'i': '64660'}
+
         if isinstance(api_key, basestring) and api_key not in ('0', ''):
             base_params['apikey'] = api_key
+            base_params_uc['r'] = api_key

         results, n_spaces = [], {}
         total, cnt, search_url, exit_log = 0, len(results), '', True
@@ -674,6 +671,7 @@ class NewznabProvider(generic.NZBProvider):
             if self.cat_ids or len(cat):
                 base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',') if self.cat_ids else []) + cat)))
+                base_params_uc['t'] = base_params['cat']

             request_params = base_params.copy()
             if ('Propers' == mode or 'nzbs_org' == self.get_id()) \
@@ -693,7 +691,10 @@ class NewznabProvider(generic.NZBProvider):
             while (offset <= total) and (offset < max_items) and batch_count:
                 cnt = len(results)

-                search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
+                if 'Cache' == mode and uc_only:
+                    search_url = '%srss?%s' % (self.url, urllib.urlencode(base_params_uc))
+                else:
+                    search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
                 i and time.sleep(2.1)

                 data = helpers.getURL(search_url)
@@ -740,7 +741,9 @@ class NewznabProvider(generic.NZBProvider):
                     hits += int(0 == hits)
                     offset = helpers.tryInt(parsed_xml.find('.//%sresponse' % n_spaces['newznab']).get('offset', 0))
                 except (AttributeError, KeyError):
-                    break
+                    if not uc_only:
+                        break
+                    total = len(items)

                 # No items found, prevent from doing another search
                 if 0 == total:
@@ -753,7 +756,7 @@ class NewznabProvider(generic.NZBProvider):
                     first_date = self._parse_pub_date(items[0])
                     last_date = self._parse_pub_date(items[-1])
                     if not first_date or not last_date or not self._last_recent_search or \
-                            last_date <= self.last_recent_search:
+                            last_date <= self.last_recent_search or uc_only:
                         break

                 if offset != request_params['offset']:
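
For usenet_crawler, cache polls are routed to the site's `rss` endpoint with the separate `base_params_uc` parameter set, while all other requests keep the generic `api` endpoint. A standalone sketch of the URL selection (endpoint paths and parameter names mirror the diff; the host and key are made up):

```python
# Cache-mode URL selection for usenet_crawler vs the generic newznab api.
try:
    from urllib import urlencode          # Python 2, as in this codebase
except ImportError:
    from urllib.parse import urlencode    # Python 3

url = 'https://www.usenet-crawler.com/'   # illustrative host
api_key = 'abcdef'                        # illustrative key
request_params = {'t': 'tvsearch', 'apikey': api_key, 'offset': 0}
base_params_uc = {'num': 100, 'dl': '1', 'i': '64660', 'r': api_key, 't': '5030,5040'}

def build_search_url(mode, uc_only):
    if 'Cache' == mode and uc_only:
        # usenet_crawler cache polls use the rss endpoint and its own params
        return '%srss?%s' % (url, urlencode(base_params_uc))
    # everything else keeps the regular newznab api endpoint
    return '%sapi?%s' % (url, urlencode(request_params))

print(build_search_url('Cache', uc_only=True))    # .../rss?...
print(build_search_url('Episode', uc_only=True))  # .../api?...
```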

View file

@@ -22,13 +22,13 @@ import time
 import traceback
 import urllib

+import feedparser
+
 import sickbeard
 from . import generic
 from sickbeard import classes, logger, show_name_helpers, tvcache
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.exceptions import AuthException
-from sickbeard.rssfeeds import RSSFeeds
 from sickbeard.common import neededQualities
@@ -38,11 +38,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         generic.NZBProvider.__init__(self, 'omgwtfnzbs')

         self.url = 'https://omgwtfnzbs.me/'
         self.url_base = 'https://omgwtfnzbs.me/'
         self.url_api = 'https://api.omgwtfnzbs.me/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'cache': 'https://rss.omgwtfnzbs.me/rss-download.php?%s',
+                     'cache': self.url_api + 'xml/?%s',
                      'search': self.url_api + 'json/?%s',
                      'cache_html': self.url_base + 'browse.php?cat=tv%s',
                      'search_html': self.url_base + 'browse.php?cat=tv&search=%s'}
@@ -69,7 +68,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             if 'notice' in data_json:
                 description_text = data_json.get('notice')
-                if 'information is incorrect' in data_json.get('notice'):
+                if re.search('(?i)(information is incorrect|in(?:valid|correct).*?(?:username|api))',
+                             data_json.get('notice')):
                     logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + str(description_text),
                                logger.DEBUG)
                     raise AuthException(
@@ -125,7 +125,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         return result

-    def _get_cats(self, needed):
+    @staticmethod
+    def _get_cats(needed):
         cats = []
         if needed.need_sd:
             cats.extend(OmgwtfnzbsProvider.cat_sd)
@@ -140,21 +141,27 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         api_key = self._init_api()
         if False is api_key:
             return self.search_html(needed=needed, **kwargs)
+
+        results = []
         cats = self._get_cats(needed=needed)
         if None is not api_key:
-            params = {'user': self.username,
+            params = {'search': '',
+                      'user': self.username,
                       'api': api_key,
                       'eng': 1,
                       'catid': ','.join(cats)}  # SD,HD
-            rss_url = self.urls['cache'] % urllib.urlencode(params)
-            logger.log(self.name + u' cache update URL: ' + rss_url, logger.DEBUG)
-            data = RSSFeeds(self).get_feed(rss_url)
+            url = self.urls['cache'] % urllib.urlencode(params)
+            response = self.get_url(url)
+            data = feedparser.parse(response.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>')
+                                    .replace('<search_req>\n', '').replace('</search_req>\n', '')
+                                    .replace('post>\n', 'entry>\n').replace('</xml>', '</feed>'))
             if data and 'entries' in data:
-                return data.entries
-            return []
+                results = data.entries
+
+        self._log_search('Cache', len(results), url)
+        return results
@@ -170,11 +177,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                       'eng': 1,
                       'nukes': 1,
                       'catid': ','.join(cats),  # SD,HD
-                      'retention': (sickbeard.USENET_RETENTION, retention)[retention or not sickbeard.USENET_RETENTION],
+                      'retention': retention or sickbeard.USENET_RETENTION or 0,
                       'search': search}

             search_url = self.urls['search'] % urllib.urlencode(params)
-            logger.log(u'Search url: ' + search_url, logger.DEBUG)

             data_json = self.get_url(search_url, json=True)
             if data_json and self._check_auth_from_data(data_json, is_xml=False):
@@ -183,6 +189,13 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
                     if item.get('nuked', '').startswith('1'):
                         continue
                     results.append(item)
+
+            mode = search_mode
+            if 'eponly' == search_mode:
+                mode = 'Episode'
+            elif 'sponly' == search_mode:
+                mode = 'Season'
+            self._log_search(mode, len(results), search_url)
         return results

     def search_html(self, search='', search_mode='', needed=neededQualities(need_all=True), **kwargs):
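
The feed returned by the `xml/` cache endpoint is not well-formed, so the response is patched with plain string replacements until feedparser accepts it: terminate the XML declaration, wrap everything in `<feed>`, and rename `<post>` items to `<entry>`. A standalone sketch of the same repair on a made-up payload (the sample input only approximates the real feed):

```python
import feedparser  # third-party; pip install feedparser

# Made-up sample of a malformed feed in the shape the replacements expect:
# an unterminated XML declaration, an <info> element, and <post> items.
raw = ('<xml version="1.0" encoding="UTF-8">\n'
       '<info>omg</info>\n'
       '<post>\n<title>Show.S01E01</title>\n</post>\n'
       '</xml>')

patched = (raw.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>')
           .replace('post>\n', 'entry>\n').replace('</xml>', '</feed>'))

data = feedparser.parse(patched)
print([e.title for e in data.entries])  # ['Show.S01E01']
```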

View file

@@ -20,6 +20,7 @@ import os
 import re
 import datetime
+from urllib import quote_plus

 import sickbeard
 from sickbeard import common
@@ -125,11 +126,27 @@ def compile_word_list(lookup_words, re_prefix='(^|[\W_])', re_suffix='($|[\W_])'):
     return result


-def makeSceneShowSearchStrings(show, season=-1):
-    showNames = allPossibleShowNames(show, season=season)
-    # scenify the names
-    return map(sanitizeSceneName, showNames)
+def url_encode(show_names, spacer='.'):
+    return [quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in show_names]
+
+
+def get_show_names(ep_obj, spacer='.'):
+    old_anime, old_dirty = ep_obj.show.is_anime, ep_obj.show.dirty
+    ep_obj.show.anime = 1  # used to limit results from all_possible(...)
+    show_names = get_show_names_all_possible(ep_obj.show, season=ep_obj.season, spacer=spacer)
+    ep_obj.show.anime = old_anime  # temporary measure, so restore property then dirty flag
+    ep_obj.show.dirty = old_dirty
+    return show_names
+
+
+def get_show_names_all_possible(show, season=-1, scenify=True, spacer='.'):
+    show_names = set(allPossibleShowNames(show, season=season))
+    if scenify:
+        show_names = map(sanitizeSceneName, show_names)
+    return url_encode(show_names, spacer)


 def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
@@ -176,7 +193,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
     numseasons = int(numseasonsSQlResult[0][0])
     seasonStrings = ["S%02d" % int(ep_obj.scene_season)]

-    showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season))
+    showNames = get_show_names_all_possible(show, ep_obj.scene_season)

     toReturn = []
@@ -221,7 +238,7 @@ def makeSceneSearchString(show, ep_obj):
     if numseasons == 1 and not ep_obj.show.is_anime:
         epStrings = ['']

-    showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season))
+    showNames = get_show_names_all_possible(show, ep_obj.scene_season)

     toReturn = []
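
`get_show_names_all_possible` now folds the scenify step and the URL-encoding into one helper, and the `spacer` substitutes for dots before quoting, which is how the space-separated nzbgeek variant is produced. A standalone sketch of the `url_encode` behavior (the Python 3 import path is added for the sketch only; the diff targets Python 2's `urllib.quote_plus`):

```python
# url_encode as above: swap the '.' spacer in, then percent-encode for a query.
try:
    from urllib import quote_plus          # Python 2, as in this codebase
except ImportError:
    from urllib.parse import quote_plus    # Python 3

def url_encode(show_names, spacer='.'):
    return [quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace'))
            for n in show_names]

names = ['Show.Name', 'Show.Name.(US)']
print(url_encode(names))              # ['Show.Name', 'Show.Name.%28US%29']
print(url_encode(names, spacer=' '))  # ['Show+Name', 'Show+Name+%28US%29']
```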

View file

@@ -77,6 +77,8 @@ class Api(webserve.BaseHandler):
     def set_default_headers(self):
         self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
         self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+        if sickbeard.SEND_SECURITY_HEADERS:
+            self.set_header('X-Frame-Options', 'SAMEORIGIN')

     def get(self, route, *args, **kwargs):
         route = route.strip('/') or 'index'

View file

@@ -123,12 +123,16 @@ class PageTemplate(Template):
 class BaseStaticFileHandler(StaticFileHandler):

     def set_extra_headers(self, path):
         self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+        if sickbeard.SEND_SECURITY_HEADERS:
+            self.set_header('X-Frame-Options', 'SAMEORIGIN')


 class BaseHandler(RequestHandler):

     def set_default_headers(self):
         self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
         self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+        if sickbeard.SEND_SECURITY_HEADERS:
+            self.set_header('X-Frame-Options', 'SAMEORIGIN')

     def redirect(self, url, permanent=False, status=None):
         if not url.startswith(sickbeard.WEB_ROOT):
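
All three handlers gate the header on the same `SEND_SECURITY_HEADERS` flag, so switching the option off under config/general restores the old behavior everywhere at once. A minimal runnable Tornado sketch of the pattern (the app, port, and module-level flag are illustrative, not SickGear's wiring):

```python
# Conditional X-Frame-Options header in a Tornado handler, as in the diff.
import tornado.ioloop
import tornado.web

SEND_SECURITY_HEADERS = True  # stands in for sickbeard.SEND_SECURITY_HEADERS

class BaseHandler(tornado.web.RequestHandler):
    def set_default_headers(self):
        self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
        if SEND_SECURITY_HEADERS:
            # SAMEORIGIN lets the UI frame itself but blocks third-party
            # sites from embedding it (clickjacking mitigation).
            self.set_header('X-Frame-Options', 'SAMEORIGIN')

class IndexHandler(BaseHandler):
    def get(self):
        self.write('ok')

if __name__ == '__main__':
    tornado.web.Application([(r'/', IndexHandler)]).listen(8081)
    tornado.ioloop.IOLoop.current().start()
```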