diff --git a/CHANGES.md b/CHANGES.md
index 4eeacc35..58bf8dc3 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -11,6 +11,15 @@
* Add 'PB', 'EB', 'ZB', 'YB' to recognised output sizes
+### 0.13.14 (2018-01-25 16:20:00 UTC)
+
+* Add config/General/Web Interface/Send security headers (default enabled)
+* Fix usenet_crawler cache mode results
+* Fix omgwtfnzbs test of invalid auth, an issue when enabling propers, and cache updating
+* Fix unicode show names when searching
+* Add season-specific naming exceptions to nzb + btn
+
+
### 0.13.13 (2018-01-19 00:45:00 UTC)
* Fix setting episode status when testing for if it should be deleted
diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl
index 97abb763..3e924340 100644
--- a/gui/slick/interfaces/default/config_general.tmpl
+++ b/gui/slick/interfaces/default/config_general.tmpl
@@ -567,6 +567,16 @@
+
+
+
+
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 98631041..8e995ebf 100755
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -130,6 +130,7 @@ WEB_IPV6 = None
WEB_IPV64 = None
HANDLE_REVERSE_PROXY = False
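+# when enabled, web and api responses include the X-Frame-Options security header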
+SEND_SECURITY_HEADERS = True
PROXY_SETTING = None
PROXY_INDEXERS = True
@@ -587,7 +588,7 @@ def initialize(console_logging=True):
HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \
DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, TIMEZONE_DISPLAY, \
WEB_USERNAME, WEB_PASSWORD, CALENDAR_UNPROTECTED, USE_API, API_KEY, WEB_PORT, WEB_LOG, \
- ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY
+ ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY, SEND_SECURITY_HEADERS
# Gen Config/Advanced
global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \
ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET
@@ -788,6 +789,7 @@ def initialize(console_logging=True):
HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', 'server.key')
HANDLE_REVERSE_PROXY = bool(check_setting_int(CFG, 'General', 'handle_reverse_proxy', 0))
+ SEND_SECURITY_HEADERS = bool(check_setting_int(CFG, 'General', 'send_security_headers', 1))
ROOT_DIRS = check_setting_str(CFG, 'General', 'root_dirs', '')
if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', ROOT_DIRS):
@@ -1561,6 +1563,7 @@ def save_config():
new_config['General']['https_cert'] = HTTPS_CERT
new_config['General']['https_key'] = HTTPS_KEY
new_config['General']['handle_reverse_proxy'] = int(HANDLE_REVERSE_PROXY)
+ new_config['General']['send_security_headers'] = int(SEND_SECURITY_HEADERS)
new_config['General']['use_nzbs'] = int(USE_NZBS)
new_config['General']['use_torrents'] = int(USE_TORRENTS)
new_config['General']['nzb_method'] = NZB_METHOD
diff --git a/sickbeard/metadata/generic.py b/sickbeard/metadata/generic.py
index 08883cd9..88dac4cf 100644
--- a/sickbeard/metadata/generic.py
+++ b/sickbeard/metadata/generic.py
@@ -37,7 +37,6 @@ from sickbeard.metadata import helpers as metadata_helpers
from sickbeard import logger
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
-from sickbeard.show_name_helpers import allPossibleShowNames
from sickbeard.indexers import indexer_config
from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_TVDB_V1
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 288da4ae..ca37af5a 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -24,6 +24,7 @@ from sickbeard import helpers, logger, scene_exceptions, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickbeard.exceptions import AuthException
from sickbeard.helpers import tryInt
+from sickbeard.show_name_helpers import get_show_names
from lib.unidecode import unidecode
try:
@@ -293,20 +294,12 @@ class BTNProvider(generic.TorrentProvider):
base_params['tvdb'] = ep_obj.show.indexerid
base_params['series'] = ep_obj.show.name
search_params.append(base_params)
- # elif 2 == ep_obj.show.indexer:
- # current_params['tvrage'] = ep_obj.show.indexerid
- # search_params.append(current_params)
- # else:
- name_exceptions = list(
- set([helpers.sanitizeSceneName(a) for a in
- scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
- dedupe = [ep_obj.show.name.replace(' ', '.')]
+
+ name_exceptions = get_show_names(ep_obj)
for name in name_exceptions:
- if name.replace(' ', '.') not in dedupe:
- dedupe += [name.replace(' ', '.')]
- series_param = base_params.copy()
- series_param['series'] = name
- search_params.append(series_param)
+ series_param = base_params.copy()
+ series_param['series'] = name
+ search_params.append(series_param)
return [dict(Season=search_params)]
@@ -318,7 +311,6 @@ class BTNProvider(generic.TorrentProvider):
search_params = []
base_params = {'category': 'Episode'}
- # episode
if ep_obj.show.air_by_date or ep_obj.show.is_sports:
date_str = str(ep_obj.airdate)
@@ -333,27 +325,16 @@ class BTNProvider(generic.TorrentProvider):
(ep_obj.scene_season, ep_obj.scene_episode))[bool(ep_obj.show.is_scene)]
base_params['name'] = 'S%02dE%02d' % (season, episode)
- # search
if 1 == ep_obj.show.indexer:
base_params['tvdb'] = ep_obj.show.indexerid
base_params['series'] = ep_obj.show.name
search_params.append(base_params)
- # elif 2 == ep_obj.show.indexer:
- # search_params['tvrage'] = ep_obj.show.indexerid
- # to_return.append(search_params)
- # else:
- # add new query string for every exception
- name_exceptions = list(
- set([helpers.sanitizeSceneName(a) for a in
- scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
- dedupe = [ep_obj.show.name.replace(' ', '.')]
+ name_exceptions = get_show_names(ep_obj)
for name in name_exceptions:
- if name.replace(' ', '.') not in dedupe:
- dedupe += [name.replace(' ', '.')]
- series_param = base_params.copy()
- series_param['series'] = name
- search_params.append(series_param)
+ series_param = base_params.copy()
+ series_param['series'] = name
+ search_params.append(series_param)
return [dict(Episode=search_params)]
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 1658eb3a..bef4274a 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -44,7 +44,7 @@ from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from sickbeard.exceptions import SickBeardException, AuthException, ex
from sickbeard.helpers import maybe_plural, remove_file_failed
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-from sickbeard.show_name_helpers import allPossibleShowNames
+from sickbeard.show_name_helpers import get_show_names_all_possible
class HaltParseException(SickBeardException):
@@ -955,9 +955,7 @@ class TorrentProvider(object, GenericProvider):
search_params = []
crop = re.compile(r'([.\s])(?:\1)+')
- for name in set(allPossibleShowNames(self.show)):
- if process_name and getattr(self, 'scene', True):
- name = helpers.sanitizeSceneName(name)
+ for name in get_show_names_all_possible(self.show, scenify=process_name and getattr(self, 'scene', True)):
for detail in ep_detail:
search_params += [crop.sub(r'\1', '%s %s%s' % (name, x, detail)) for x in prefix]
return search_params
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index c6fd1a60..7c3cc1da 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -27,7 +27,7 @@ from math import ceil
from sickbeard.sbdatetime import sbdatetime
from . import generic
-from sickbeard import helpers, logger, scene_exceptions, tvcache, classes, db
+from sickbeard import helpers, logger, tvcache, classes, db
from sickbeard.common import neededQualities, Quality
from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.indexers.indexer_config import *
@@ -35,6 +35,7 @@ from io import BytesIO
from lib.dateutil import parser
from sickbeard.network_timezones import sb_timezone
from sickbeard.helpers import tryInt
+from sickbeard.show_name_helpers import get_show_names
from sickbeard.search import get_wanted_qualities, get_aired_in_season
try:
@@ -351,15 +352,11 @@ class NewznabProvider(generic.NZBProvider):
use_id = True
use_id and search_params.append(params)
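+ # use a space as the query term separator for nzbgeek, a dot for other newznab sites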
+ spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
# query search and exceptions
- name_exceptions = list(
- set([helpers.sanitizeSceneName(a) for a in
- scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
-
- spacer = 'geek' in self.get_id() and ' ' or '.'
+ name_exceptions = get_show_names(ep_obj, spacer)
for cur_exception in name_exceptions:
params = base_params.copy()
- cur_exception = cur_exception.replace('.', spacer)
if 'q' in params:
params['q'] = '%s%s%s' % (cur_exception, spacer, params['q'])
search_params.append(params)
@@ -408,17 +405,13 @@ class NewznabProvider(generic.NZBProvider):
use_id = True
use_id and search_params.append(params)
+ spacer = 'nzbgeek.info' in self.url.lower() and ' ' or '.'
# query search and exceptions
- name_exceptions = list(
- set([helpers.sanitizeSceneName(a) for a in
- scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]]))
-
- spacer = 'geek' in self.get_id() and ' ' or '.'
+ name_exceptions = get_show_names(ep_obj, spacer)
if sickbeard.scene_exceptions.has_abs_episodes(ep_obj):
search_params.append({'q': '%s%s%s' % (ep_obj.show.name, spacer, base_params['ep'])})
for cur_exception in name_exceptions:
params = base_params.copy()
- cur_exception = cur_exception.replace('.', spacer)
params['q'] = cur_exception
search_params.append(params)
@@ -444,7 +437,7 @@ class NewznabProvider(generic.NZBProvider):
r_found = True
while r_found:
r_found = False
- for pattern, repl in ((r'(?i)-Obfuscated$', ''), (r'(?i)-postbot$', '')):
+ for pattern, repl in ((r'(?i)-Obfuscated$', ''), (r'(?i)-postbot$', ''), (r'(?i)[-.]English$', '')):
if re.search(pattern, title):
r_found = True
title = re.sub(pattern, repl, title)
@@ -633,8 +626,12 @@ class NewznabProvider(generic.NZBProvider):
if v in self.caps]),
'offset': 0}
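+ # usenet_crawler cache mode fetches its rss feed instead of the api endpoint (see the url choice below)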
+ uc_only = all([re.search('(?i)usenet_crawler', self.get_id())])
+ base_params_uc = {'num': self.limits, 'dl': '1', 'i': '64660'}
+
if isinstance(api_key, basestring) and api_key not in ('0', ''):
base_params['apikey'] = api_key
+ base_params_uc['r'] = api_key
results, n_spaces = [], {}
total, cnt, search_url, exit_log = 0, len(results), '', True
@@ -674,6 +671,7 @@ class NewznabProvider(generic.NZBProvider):
if self.cat_ids or len(cat):
base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',') if self.cat_ids else []) + cat)))
+ base_params_uc['t'] = base_params['cat']
request_params = base_params.copy()
if ('Propers' == mode or 'nzbs_org' == self.get_id()) \
@@ -693,7 +691,10 @@ class NewznabProvider(generic.NZBProvider):
while (offset <= total) and (offset < max_items) and batch_count:
cnt = len(results)
- search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
+ if 'Cache' == mode and uc_only:
+ search_url = '%srss?%s' % (self.url, urllib.urlencode(base_params_uc))
+ else:
+ search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
i and time.sleep(2.1)
data = helpers.getURL(search_url)
@@ -740,7 +741,9 @@ class NewznabProvider(generic.NZBProvider):
hits += int(0 == hits)
offset = helpers.tryInt(parsed_xml.find('.//%sresponse' % n_spaces['newznab']).get('offset', 0))
except (AttributeError, KeyError):
- break
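+ # no offset info when only the usenet_crawler rss feed was fetched, so count the items as the total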
+ if not uc_only:
+ break
+ total = len(items)
# No items found, prevent from doing another search
if 0 == total:
@@ -753,7 +756,7 @@ class NewznabProvider(generic.NZBProvider):
first_date = self._parse_pub_date(items[0])
last_date = self._parse_pub_date(items[-1])
if not first_date or not last_date or not self._last_recent_search or \
- last_date <= self.last_recent_search:
+ last_date <= self.last_recent_search or uc_only:
break
if offset != request_params['offset']:
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index ebcd45b2..e2bbbc0a 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -22,13 +22,13 @@ import time
import traceback
import urllib
+import feedparser
import sickbeard
from . import generic
from sickbeard import classes, logger, show_name_helpers, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickbeard.exceptions import AuthException
-from sickbeard.rssfeeds import RSSFeeds
from sickbeard.common import neededQualities
@@ -38,11 +38,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
generic.NZBProvider.__init__(self, 'omgwtfnzbs')
self.url = 'https://omgwtfnzbs.me/'
-
self.url_base = 'https://omgwtfnzbs.me/'
self.url_api = 'https://api.omgwtfnzbs.me/'
self.urls = {'config_provider_home_uri': self.url_base,
- 'cache': 'https://rss.omgwtfnzbs.me/rss-download.php?%s',
+ 'cache': self.url_api + 'xml/?%s',
'search': self.url_api + 'json/?%s',
'cache_html': self.url_base + 'browse.php?cat=tv%s',
'search_html': self.url_base + 'browse.php?cat=tv&search=%s'}
@@ -69,7 +68,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
if 'notice' in data_json:
description_text = data_json.get('notice')
- if 'information is incorrect' in data_json.get('notice'):
+ if re.search('(?i)(information is incorrect|in(?:valid|correct).*?(?:username|api))',
+ data_json.get('notice')):
logger.log(u'Incorrect authentication credentials for ' + self.name + ' : ' + str(description_text),
logger.DEBUG)
raise AuthException(
@@ -125,7 +125,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
return result
- def _get_cats(self, needed):
+ @staticmethod
+ def _get_cats(needed):
cats = []
if needed.need_sd:
cats.extend(OmgwtfnzbsProvider.cat_sd)
@@ -140,21 +141,27 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
api_key = self._init_api()
if False is api_key:
return self.search_html(needed=needed, **kwargs)
+ results = []
cats = self._get_cats(needed=needed)
if None is not api_key:
- params = {'user': self.username,
+ params = {'search': '',
+ 'user': self.username,
'api': api_key,
'eng': 1,
'catid': ','.join(cats)} # SD,HD
- rss_url = self.urls['cache'] % urllib.urlencode(params)
+ url = self.urls['cache'] % urllib.urlencode(params)
- logger.log(self.name + u' cache update URL: ' + rss_url, logger.DEBUG)
+ response = self.get_url(url)
- data = RSSFeeds(self).get_feed(rss_url)
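+ # reshape the xml response so that feedparser reads its posts as feed entries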
+ data = feedparser.parse(response.replace('\n', '?>\n\n')
+ .replace('\n', '').replace('\n', '')
+ .replace('post>\n', 'entry>\n').replace('', ''))
if data and 'entries' in data:
- return data.entries
- return []
+ results = data.entries
+
+ self._log_search('Cache', len(results), url)
+ return results
def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0,
needed=neededQualities(need_all=True), **kwargs):
@@ -170,11 +177,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
'eng': 1,
'nukes': 1,
'catid': ','.join(cats), # SD,HD
- 'retention': (sickbeard.USENET_RETENTION, retention)[retention or not sickbeard.USENET_RETENTION],
+ 'retention': retention or sickbeard.USENET_RETENTION or 0,
'search': search}
search_url = self.urls['search'] % urllib.urlencode(params)
- logger.log(u'Search url: ' + search_url, logger.DEBUG)
data_json = self.get_url(search_url, json=True)
if data_json and self._check_auth_from_data(data_json, is_xml=False):
@@ -183,6 +189,13 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
if item.get('nuked', '').startswith('1'):
continue
results.append(item)
+
+ mode = search_mode
+ if 'eponly' == search_mode:
+ mode = 'Episode'
+ elif 'sponly' == search_mode:
+ mode = 'Season'
+ self._log_search(mode, len(results), search_url)
return results
def search_html(self, search='', search_mode='', needed=neededQualities(need_all=True), **kwargs):
diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py
index aa6568f1..16c2a0a5 100644
--- a/sickbeard/show_name_helpers.py
+++ b/sickbeard/show_name_helpers.py
@@ -20,6 +20,7 @@ import os
import re
import datetime
+from urllib import quote_plus
import sickbeard
from sickbeard import common
@@ -125,11 +126,27 @@ def compile_word_list(lookup_words, re_prefix='(^|[\W_])', re_suffix='($|[\W_])'
return result
-def makeSceneShowSearchStrings(show, season=-1):
- showNames = allPossibleShowNames(show, season=season)
- # scenify the names
- return map(sanitizeSceneName, showNames)
+def url_encode(show_names, spacer='.'):
+
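+ # quote each name for use in a url, first swapping dots for the given spacer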
+ return [quote_plus(n.replace('.', spacer).encode('utf-8', errors='replace')) for n in show_names]
+
+
+def get_show_names(ep_obj, spacer='.'):
+
+ old_anime, old_dirty = ep_obj.show.is_anime, ep_obj.show.dirty
+ ep_obj.show.anime = 1 # used to limit results from all_possible(...)
+ show_names = get_show_names_all_possible(ep_obj.show, season=ep_obj.season, spacer=spacer)
+ ep_obj.show.anime = old_anime # temporary measure, so restore property then dirty flag
+ ep_obj.show.dirty = old_dirty
+ return show_names
+
+
+def get_show_names_all_possible(show, season=-1, scenify=True, spacer='.'):
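+ # collect every known name for the show, optionally scenified, then url-encode with the spacer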
+ show_names = set(allPossibleShowNames(show, season=season))
+ if scenify:
+ show_names = map(sanitizeSceneName, show_names)
+ return url_encode(show_names, spacer)
def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
@@ -176,7 +193,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
numseasons = int(numseasonsSQlResult[0][0])
seasonStrings = ["S%02d" % int(ep_obj.scene_season)]
- showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season))
+ showNames = get_show_names_all_possible(show, ep_obj.scene_season)
toReturn = []
@@ -221,7 +238,7 @@ def makeSceneSearchString(show, ep_obj):
if numseasons == 1 and not ep_obj.show.is_anime:
epStrings = ['']
- showNames = set(makeSceneShowSearchStrings(show, ep_obj.scene_season))
+ showNames = get_show_names_all_possible(show, ep_obj.scene_season)
toReturn = []
diff --git a/sickbeard/webapi.py b/sickbeard/webapi.py
index 5444c91a..b7c5ffae 100644
--- a/sickbeard/webapi.py
+++ b/sickbeard/webapi.py
@@ -77,6 +77,8 @@ class Api(webserve.BaseHandler):
def set_default_headers(self):
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+ if sickbeard.SEND_SECURITY_HEADERS:
+ self.set_header('X-Frame-Options', 'SAMEORIGIN')
def get(self, route, *args, **kwargs):
route = route.strip('/') or 'index'
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 45c93c41..04dc2cb6 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -123,12 +123,16 @@ class PageTemplate(Template):
class BaseStaticFileHandler(StaticFileHandler):
def set_extra_headers(self, path):
self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+ if sickbeard.SEND_SECURITY_HEADERS:
+ self.set_header('X-Frame-Options', 'SAMEORIGIN')
class BaseHandler(RequestHandler):
def set_default_headers(self):
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
self.set_header('X-Robots-Tag', 'noindex, nofollow, noarchive, nocache, noodp, noydir, noimageindex, nosnippet')
+ if sickbeard.SEND_SECURITY_HEADERS:
+ self.set_header('X-Frame-Options', 'SAMEORIGIN')
def redirect(self, url, permanent=False, status=None):
if not url.startswith(sickbeard.WEB_ROOT):