Merge pull request #655 from JackDandy/feature/FixAltUnicodeSearch

Fix alternative unicode show names from breaking search.

Commit d0231cea56: 5 changed files with 25 additions and 14 deletions
CHANGES.md
@@ -36,6 +36,7 @@
 * Change indicate when not sorting with article by dimming ("The", "A", "An") on Show List, Episode, History,
   Mass Update, Add with Browse and from Existing views
 * Add Emby notifier to config/Notifications
+* Fix alternative unicode show names from breaking search
 
 
 ### 0.11.6 (2016-02-18 23:10:00 UTC)
sickbeard/providers/generic.py
@@ -131,6 +131,11 @@ class GenericProvider:
 
         return result
 
+    # noinspection PyUnusedLocal
+    def cb_response(self, r, *args, **kwargs):
+        self.session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed, from_cache=r.from_cache)
+        return r
+
     def get_url(self, url, post_data=None, params=None, timeout=30, json=False):
         """
         By default this is just a simple urlopen call but this method should be overridden
@@ -142,7 +147,7 @@ class GenericProvider:
             return
 
         return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
-                              session=self.session, json=json)
+                              session=self.session, json=json, hooks=dict(response=self.cb_response))
 
     def download_result(self, result):
         """
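Note: the cb_response hook added above is standard requests machinery, not SickGear-specific. A callable passed as hooks=dict(response=...) is invoked with each Response object and may observe or replace it; returning it unchanged simply records where the request actually went, with any unicode query values already percent-encoded by requests. A minimal standalone sketch of the pattern (the example URL is illustrative, and from_cache is omitted because that attribute only exists on the cache-aware session SickGear uses):

import requests

session = requests.session()

def cb_response(r, *args, **kwargs):
    # Record the URL requests really sent; the query string is already
    # percent-encoded at this point, so it is safe to log as a plain str.
    session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed)
    return r

r = session.get('https://httpbin.org/get', params={'q': u'caf\xe9'},
                hooks=dict(response=cb_response))
print(session.response['url'])  # ends in ?q=caf%C3%A9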
sickbeard/providers/newznab.py
@@ -17,7 +17,6 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 import time
-import urllib
 
 import sickbeard
 
@@ -84,8 +83,7 @@ class NewznabProvider(generic.NZBProvider):
 
         categories = self.get_url('%s/api' % self.url, params=params, timeout=10)
         if not categories:
-            logger.log(u'Error getting html for [%s/api?%s]' %
-                       (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items())), logger.DEBUG)
+            logger.log(u'Error getting html for [%s]' % self.session.response['url'], logger.DEBUG)
             return (False, return_categories, 'Error getting html for [%s]' %
                     ('%s/api?%s' % (self.url, '&'.join('%s=%s' % (x, y) for x, y in params.items()))))
 
@@ -258,9 +256,8 @@ class NewznabProvider(generic.NZBProvider):
         # hardcoded to stop after a max of 4 hits (400 items) per query
         while (offset <= total) and (offset < (200, 400)[self.supports_tvdbid()]) and batch_count:
             cnt = len(results)
-            search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
 
-            data = self.cache.getRSSFeed(search_url)
+            data = self.cache.getRSSFeed('%sapi' % self.url, params=request_params)
             i and time.sleep(1.1)
 
             if not data or not self.check_auth_from_data(data):
@@ -295,13 +292,13 @@ class NewznabProvider(generic.NZBProvider):
                 break
 
             if offset != request_params['offset']:
-                logger.log('Tell your newznab provider to fix their bloody newznab responses')
+                logger.log('Ask your newznab provider to fix their newznab responses')
                 break
 
             request_params['offset'] += request_params['limit']
             if total <= request_params['offset']:
                 exit_log = True
-                logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
+                logger.log('%s item%s found for episode matching' % (total, helpers.maybe_plural(total)),
                            logger.DEBUG)
                 break
 
@@ -310,10 +307,10 @@ class NewznabProvider(generic.NZBProvider):
             logger.log('%s more item%s to fetch from a batch of up to %s items.'
                        % (items, helpers.maybe_plural(items), request_params['limit']), logger.DEBUG)
 
-            batch_count = self._log_result(results, mode, cnt, search_url)
+            batch_count = self._log_result(results, mode, cnt, data.rq_response['url'])
 
         if exit_log:
-            self._log_result(results, mode, cnt, search_url)
+            self._log_result(results, mode, cnt, data and data.rq_response['url'] or '%sapi' % self.url)
             exit_log = False
 
         if 'tvdbid' in request_params and len(results):
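The getRSSFeed hunk is the heart of the fix. Under Python 2, urllib.urlencode() calls str() on every value, so a search dict carrying a unicode show name raised UnicodeEncodeError before the provider was even queried. Handing the raw dict to requests via params= lets it UTF-8-encode and percent-escape the values itself, and the hooked response (data.rq_response['url'], wired up in rssfeeds.py below) supplies the real URL for logging. A Python 2 sketch of the failure and the fix (the parameter values and example.com URL are illustrative):

# -*- coding: utf-8 -*-
import urllib

import requests

request_params = {'t': 'tvsearch', 'q': u'caf\xe9 society'}  # illustrative

try:
    urllib.urlencode(request_params)  # str() on a unicode value...
except UnicodeEncodeError as e:
    print('urlencode breaks on the unicode show name: %s' % e)

# requests encodes the same dict itself, so the URL builds cleanly:
prepared = requests.Request('GET', 'http://example.com/api',
                            params=request_params).prepare()
print(prepared.url)  # ...q=caf%C3%A9+society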
sickbeard/rssfeeds.py
@@ -14,6 +14,7 @@ class RSSFeeds:
     def __init__(self, provider=None):
 
         self.provider = provider
+        self.response = None
 
     def _check_auth_cookie(self):
 
@@ -21,7 +22,12 @@ class RSSFeeds:
             return self.provider.check_auth_cookie()
         return True
 
-    def get_feed(self, url, request_headers=None):
+    # noinspection PyUnusedLocal
+    def cb_response(self, r, *args, **kwargs):
+        self.response = dict(url=r.url, elapsed=r.elapsed, from_cache=r.from_cache)
+        return r
+
+    def get_feed(self, url, request_headers=None, **kwargs):
 
         if not self._check_auth_cookie():
             return
 
@@ -30,12 +36,14 @@ class RSSFeeds:
         if self.provider and hasattr(self.provider, 'session'):
            session = self.provider.session
 
-        response = helpers.getURL(url, headers=request_headers, session=session)
+        response = helpers.getURL(url, headers=request_headers, session=session,
+                                  hooks=dict(response=self.cb_response), **kwargs)
         if not response:
             return
 
         try:
             feed = feedparser.parse(response)
+            feed['rq_response'] = self.response
             if feed and 'entries' in feed:
                 return feed
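feedparser.parse() returns a FeedParserDict, which maps attribute access onto its keys, so planting the hook's metadata under feed['rq_response'] here is what lets the newznab code above write data.rq_response['url']. A small sketch of that round trip (the feed string and URL are illustrative):

import feedparser

# feedparser accepts a URL, a file, or the document itself as a string
feed = feedparser.parse('<rss version="2.0"><channel><item>'
                        '<title>demo</title></item></channel></rss>')

# mimic get_feed(): attach the metadata captured by cb_response
feed['rq_response'] = dict(url='http://example.com/api?q=demo',
                           from_cache=False)

# FeedParserDict allows both spellings
print(feed.rq_response['url'])
print(feed['rq_response']['url'])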
sickbeard/tvcache.py
@@ -107,8 +107,8 @@ class TVCache:
 
         return []
 
-    def getRSSFeed(self, url):
-        return RSSFeeds(self.provider).get_feed(url)
+    def getRSSFeed(self, url, **kwargs):
+        return RSSFeeds(self.provider).get_feed(url, **kwargs)
 
     def _translateTitle(self, title):
         return u'' + title.replace(' ', '.')
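The **kwargs pass-through means TVCache no longer has to know which extra arguments a provider wants to send: whatever the caller adds (here, params=request_params) flows untouched from getRSSFeed() into get_feed() and on into helpers.getURL(). A toy sketch of the forwarding pattern with stand-in functions (only the pattern is from the diff):

def get_feed(url, request_headers=None, **kwargs):
    print('fetching %s with %r' % (url, kwargs))

def getRSSFeed(url, **kwargs):
    # forward everything; no signature change needed for new options
    return get_feed(url, **kwargs)

getRSSFeed('http://example.com/api', params={'t': 'tvsearch', 'q': 'demo'})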