Repository: https://github.com/SickGear/SickGear.git
Commit: 83bcc63683 (parent: 7a22f6c77a)

More RSS cache fixes

9 changed files with 50 additions and 107 deletions

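In short, the diff below moves the providers off hand-rolled XML/JSON parsing and onto feedparser-style RSS results (data.entries, data.feed.title), adds a json flag to helpers.getURL so callers can receive parsed JSON directly, and hardens the XEM scene-numbering copy by switching copy.copy to copy.deepcopy.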
@@ -167,7 +167,7 @@ def sanitizeFileName(name):
     return name
 
 
-def getURL(url, post_data=None, headers=None, params=None, timeout=None):
+def getURL(url, post_data=None, headers=None, params=None, json=False):
    """
    Returns a byte-string retrieved from the url provider.
    """
@@ -206,6 +206,9 @@ Returns a byte-string retrieved from the url provider.
         logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
         return None
 
+    if json:
+        return resp.json() if resp.ok else None
+
     return resp.content if resp.ok else None
 
 
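For context: resp.ok, resp.json() and resp.content are the requests API, so the reworked helper plausibly reads like the sketch below. Only the json branch is taken from the diff; the surrounding fetch logic is an assumption.

    import requests

    def getURL(url, post_data=None, headers=None, params=None, json=False):
        # a minimal sketch, assuming requests does the fetching
        try:
            if post_data:
                resp = requests.post(url, data=post_data, headers=headers, params=params)
            else:
                resp = requests.get(url, headers=headers, params=params)
        except requests.exceptions.RequestException:
            return None

        if json:
            # hand back parsed Python objects (dict/list) instead of a byte-string
            return resp.json() if resp.ok else None

        return resp.content if resp.ok else None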
@@ -52,8 +52,7 @@ class EZRSSProvider(generic.TorrentProvider):
 
     def getQuality(self, item):
 
-        filename = helpers.get_xml_text(
-            item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
+        filename = item.filename
         quality = Quality.nameQuality(filename)
 
         return quality
@@ -121,7 +120,6 @@ class EZRSSProvider(generic.TorrentProvider):
         items = data.entries
-
         results = []
 
         for curItem in items:
 
             (title, url) = self._get_title_and_url(curItem)
@@ -139,9 +137,7 @@ class EZRSSProvider(generic.TorrentProvider):
     def _get_title_and_url(self, item):
         (title, url) = generic.TorrentProvider._get_title_and_url(self, item)
 
-        filename = helpers.get_xml_text(
-            item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
-
+        filename = item.filename
         if filename:
             new_title = self._extract_name_from_filename(filename)
             if new_title:
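Replacing the namespaced item.find(...) lookup with item.filename implies the feed is now parsed by feedparser, which flattens an entry's child elements (including namespaced ones such as ezrss:fileName) into entry attributes. A small illustration; the URL and attribute names are assumptions:

    import feedparser

    feed = feedparser.parse('https://ezrss.example/feed')  # hypothetical URL
    for item in feed.entries:
        # .get() avoids the AttributeError a bare item.filename raises
        # when an entry lacks the element
        filename = item.get('filename')
        if filename:
            print(item.title, filename)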
@@ -103,7 +103,7 @@ class GenericProvider:
 
         return result
 
-    def getURL(self, url, post_data=None, headers=None):
+    def getURL(self, url, post_data=None, headers=None, json=False):
         """
         By default this is just a simple urlopen call but this method should be overridden
         for providers with special URL requirements (like cookies)
@@ -112,7 +112,7 @@ class GenericProvider:
         if not headers:
             headers = []
 
-        data = helpers.getURL(url, post_data, headers)
+        data = helpers.getURL(url, post_data, headers, json=json)
 
         if not data:
             logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR)
@@ -244,7 +244,7 @@ class GenericProvider:
         self._checkAuth()
 
         # XEM episode scene numbering
-        sceneEpisode = copy.copy(episode)
+        sceneEpisode = copy.deepcopy(episode)
         sceneEpisode.convertToSceneNumbering()
 
         logger.log(u'Searching "%s" for "%s" as "%s"'
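The copy.copy to copy.deepcopy switch matters because convertToSceneNumbering mutates the copy in place: a shallow copy shares nested attributes with the original episode, so the scene numbering would leak back into it. A toy demonstration:

    import copy

    class Episode(object):  # toy stand-in for the real episode object
        def __init__(self):
            self.numbering = {'season': 5, 'episode': 3}

        def convertToSceneNumbering(self):
            self.numbering['season'] = 6  # mutates nested state in place

    ep = Episode()
    copy.copy(ep).convertToSceneNumbering()
    print(ep.numbering['season'])       # 6: the shallow copy shared the dict

    ep = Episode()
    copy.deepcopy(ep).convertToSceneNumbering()
    print(ep.numbering['season'])       # 5: the original is untouched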
@@ -440,15 +440,3 @@ class TorrentProvider(GenericProvider):
         GenericProvider.__init__(self, name)
 
         self.providerType = GenericProvider.TORRENT
-
-        # self.option = {SEED_POLICY_TIME : '',
-        #                SEED_POLICY_RATIO: '',
-        #                'PROCESS_METHOD': ''
-        #                }
-
-    # def get_provider_options(self):
-    #     pass
-    #
-    # def set_provider_options(self):
-    #     self.option[SEED_POLICY_TIME] + '|' + self.option[SEED_POLICY_RATIO] + '|' + self.option['PROCESS_METHOD']
-
@@ -53,17 +53,16 @@ class HDBitsProvider(generic.TorrentProvider):
 
         return True
 
-    def _checkAuthFromData(self, parsedJSON):
+    def _checkAuthFromData(self, data):
 
-        if parsedJSON is None:
+        if data is None:
             return self._checkAuth()
 
-        if 'status' in parsedJSON and 'message' in parsedJSON:
-            if parsedJSON.get('status') == 5:
-                logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'],
-                           logger.DEBUG)
-                raise AuthException(
-                    "Your authentication credentials for " + self.name + " are incorrect, check your config.")
+        if data.status == 5:
+            logger.log(u"Incorrect authentication credentials for " + self.name + " : " + data.feed.title,
+                       logger.DEBUG)
+            raise AuthException(
+                "Your authentication credentials for " + self.name + " are incorrect, check your config.")
 
         return True
 
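_checkAuthFromData now receives a feedparser result rather than parsed JSON: data.status is the HTTP status feedparser records when it fetches the URL itself, and data.feed.title is feed-level metadata pressed into service as the error message. A hedged sketch of the pattern; the status value 5 is HDBits' convention as shown in the diff, not a standard HTTP code:

    import feedparser

    data = feedparser.parse('https://hdbits.example/api/feed')  # hypothetical URL
    # status is only set when feedparser performed the HTTP fetch itself
    if getattr(data, 'status', None) == 5:
        raise Exception(u"Incorrect authentication credentials: "
                        + data.feed.get('title', u'unknown error'))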
@@ -81,26 +80,19 @@ class HDBitsProvider(generic.TorrentProvider):
         if results or not manualSearch:
             return results
 
-        data = self.getURL(self.search_url, post_data=self._make_post_data_JSON(show=episode.show, episode=episode))
+        data = self.getRSSFeed(self.search_url, post_data=self._make_post_data_JSON(show=episode.show, episode=episode))
 
         if not data:
             logger.log(u"No data returned from " + self.search_url, logger.ERROR)
             return []
 
-        parsedJSON = helpers.parse_json(data)
-
-        if parsedJSON is None:
-            logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
-            return []
-
-        if self._checkAuthFromData(parsedJSON):
+        if self._checkAuthFromData(data):
 
             results = []
 
-            if parsedJSON and 'data' in parsedJSON:
-                items = parsedJSON['data']
-            else:
+            items = data.entries
+            if not len(items) > 0:
                 logger.log(u"Resulting JSON from " + self.name + " isn't correct, not parsing it", logger.ERROR)
-                items = []
+                return []
 
             for item in items:
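getRSSFeed itself is not part of this diff. Judging by the call sites (data.entries, data.feed.title), it presumably wraps feedparser; the sketch below is one plausible shape, with the POST handling and bozo check as assumptions:

    import feedparser
    from sickbeard import helpers

    def getRSSFeed(self, url, post_data=None):
        # fetch the raw body first so POST requests keep working,
        # then hand the string to feedparser
        raw = helpers.getURL(url, post_data=post_data)
        if not raw:
            return None
        parsed = feedparser.parse(raw)
        # feedparser flags malformed feeds via the bozo bit
        return None if parsed.bozo else parsed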
@@ -144,8 +136,8 @@ class HDBitsProvider(generic.TorrentProvider):
 
     def _get_title_and_url(self, item):
 
-        title = item['name']
-        url = self.download_url + urllib.urlencode({'id': item['id'], 'passkey': sickbeard.HDBITS_PASSKEY})
+        title = item.title
+        url = self.download_url + urllib.urlencode({'id': item.id, 'passkey': sickbeard.HDBITS_PASSKEY})
 
         return (title, url)
 
@@ -199,18 +191,9 @@ class HDBitsCache(tvcache.TVCache):
             logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
             self._clearCache()
 
-            parsedJSON = helpers.parse_json(data)
-
-            if parsedJSON is None:
-                logger.log(u"Error trying to load " + self.provider.name + " JSON feed", logger.ERROR)
-                return []
-
-            if self._checkAuth(parsedJSON):
-                if parsedJSON and 'data' in parsedJSON:
-                    items = parsedJSON['data']
-                else:
-                    logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it",
-                               logger.ERROR)
+            if self._checkAuth(data):
+                items = data.entries
+                if not len(items) > 0:
                     return []
 
                 cl = []
@@ -248,5 +231,4 @@ class HDBitsCache(tvcache.TVCache):
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)
 
-
 provider = HDBitsProvider()
@@ -81,7 +81,7 @@ class NewzbinProvider(generic.NZBProvider):
         return sickbeard.NEWZBIN
 
     def getQuality(self, item):
-        attributes = item.getElementsByTagName('report:attributes')[0]
+        attributes = item.report[0]
         attr_dict = {}
 
         for attribute in attributes.getElementsByTagName('report:attribute'):
@@ -178,7 +178,7 @@ class NewznabProvider(generic.NZBProvider):
                 raise AuthException(
                     "Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
             else:
-                logger.log(u"Unknown error given from " + self.name + ": " + data.description,
+                logger.log(u"Unknown error given from " + self.name + ": " + data.feed.title,
                            logger.ERROR)
                 return False
 
@@ -224,7 +224,7 @@ class NewznabProvider(generic.NZBProvider):
                 results.append(curItem)
             else:
                 logger.log(
-                    u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable",
+                    u"The data returned from the " + self.name + " RSS feed is incomplete, this result is unusable",
                     logger.DEBUG)
 
         return results
@@ -244,22 +244,11 @@ class NewznabProvider(generic.NZBProvider):
 
             (title, url) = self._get_title_and_url(item)
 
-            description_node = item.find('pubDate')
-            description_text = helpers.get_xml_text(description_node)
-
-            try:
-                # we could probably do dateStr = descriptionStr but we want date in this format
-                date_text = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}',
-                                      description_text).group(1)
-            except:
-                date_text = None
-
-            if not date_text:
+            if not item.published_parsed:
                 logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
                 continue
             else:
-
-                result_date = email.utils.parsedate(date_text)
+                result_date = item.published_parsed
                 if result_date:
                     result_date = datetime.datetime(*result_date[0:6])
 
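feedparser has already parsed pubDate into a time.struct_time stored on published_parsed, which is why the regex plus email.utils.parsedate dance collapses to a single tuple unpack. The conversion, with an illustrative timestamp:

    import datetime
    import time

    # stand-in for item.published_parsed, which is a time.struct_time
    published_parsed = time.strptime('Tue, 03 Dec 2013 18:03:37',
                                     '%a, %d %b %Y %H:%M:%S')
    result_date = datetime.datetime(*published_parsed[0:6])
    print(result_date)  # 2013-12-03 18:03:37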
@@ -296,10 +285,6 @@ class NewznabCache(tvcache.TVCache):
             logger.log(u"No data returned from " + rss_url, logger.ERROR)
             return None
 
-        # hack this in until it's fixed server side
-        #if data and not data.startswith('<?xml'):
-        #    data = '<?xml version="1.0" encoding="ISO-8859-1" ?>' + data
-
         return data
 
     def _checkAuth(self, data):
@@ -28,7 +28,6 @@ except ImportError:
 from sickbeard import exceptions, logger
 from sickbeard import tvcache, show_name_helpers
 
-
 class NZBsRUSProvider(generic.NZBProvider):
     def __init__(self):
         generic.NZBProvider.__init__(self, "NZBs'R'US")
@@ -68,29 +67,26 @@ class NZBsRUSProvider(generic.NZBProvider):
         searchURL = self.url + 'api.php?' + urllib.urlencode(params)
         logger.log(u"NZBS'R'US search url: " + searchURL, logger.DEBUG)
 
-        data = self.getURL(searchURL)
+        data = self.getRSSFeed(searchURL)
         if not data:
             return []
 
-        if not data.startswith('<?xml'):  # Error will be a single line of text
-            logger.log(u"NZBs'R'US error: " + data, logger.ERROR)
-            return []
-
-        root = etree.fromstring(data)
-        if root is None:
+        items = data.entries
+        if not len(items) > 0:
             logger.log(u"Error trying to parse NZBS'R'US XML data.", logger.ERROR)
             logger.log(u"RSS data: " + data, logger.DEBUG)
             return []
-        return root.findall('./results/result')
+
+        return items
 
-    def _get_title_and_url(self, element):
-        if element.find('title'):  # RSS feed
-            title = element.find('title').text
-            url = element.find('link').text.replace('&amp;', '&')
+    def _get_title_and_url(self, item):
+        if item.title:  # RSS feed
+            title = item.title
+            url = item.link
         else:  # API item
-            title = element.find('name').text
-            nzbID = element.find('id').text
-            key = element.find('key').text
+            title = item.name
+            nzbID = item.id
+            key = item.key
             url = self.url + 'nzbdownload_rss.php' + '/' + \
                   nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/'
         return (title, url)
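One caveat with the new RSS branch: feedparser's FeedParserDict raises AttributeError when a missing key is read as an attribute, so `if item.title` assumes every entry carries a title. A defensive variant (not what this commit does) would use .get():

    import feedparser

    feed = feedparser.parse('https://nzbsrus.example/rss')  # hypothetical URL
    for item in feed.entries:
        # FeedParserDict.get() returns a default instead of raising AttributeError
        title = item.get('title')
        if title:                  # RSS feed entry
            url = item.get('link')
        else:                      # API-style item
            title = item.get('name')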
@@ -111,7 +107,7 @@ class NZBsRUSCache(tvcache.TVCache):
         url += urllib.urlencode(urlArgs)
         logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG)
 
-        data = self.provider.getURL(url)
+        data = self.provider.getRSSFeed(url)
         return data
 
     def _checkAuth(self, data):
@@ -64,7 +64,6 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         if is_XML:
             # provider doesn't return xml on error
             return True
-
         else:
             parsedJSON = parsed_data
 
@@ -112,23 +111,17 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         search_url = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params)
         logger.log(u"Search url: " + search_url, logger.DEBUG)
 
-        data = self.getURL(search_url)
+        data = self.getURL(search_url, json=True)
 
         if not data:
             logger.log(u"No data returned from " + search_url, logger.ERROR)
             return []
 
-        parsedJSON = helpers.parse_json(data)
-
-        if parsedJSON is None:
-            logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
-            return []
-
-        if self._checkAuthFromData(parsedJSON, is_XML=False):
+        if self._checkAuthFromData(data, is_XML=False):
 
             results = []
 
-            for item in parsedJSON:
+            for item in data:
                 if 'release' in item and 'getnzb' in item:
                     results.append(item)
 
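With json=True, getURL now returns the parsed payload directly, so `for item in data` iterates dicts. An assumed shape of the omgwtfnzbs response, matching the membership checks above:

    data = [{'release': 'Show.S01E01.720p.HDTV', 'getnzb': 'https://example/nzb/1'}]
    results = [item for item in data if 'release' in item and 'getnzb' in item]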
@@ -171,7 +164,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.provider.getURL(rss_url)
+        data = self.provider.getRSSFeed(rss_url)
 
         if not data:
             logger.log(u"No data returned from " + rss_url, logger.ERROR)
@@ -59,7 +59,7 @@ class TvTorrentsProvider(generic.TorrentProvider):
         if data is None:
             return self._checkAuth()
 
-        description_text = data.description
+        description_text = data.feed.title
 
         if "User can't be found" in description_text or "Invalid Hash" in description_text:
             logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text),