Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-11 05:33:37 +00:00

Merge pull request #448 from JackDandy/feature/ChangeProvRSS

Change provider RSS torrent code to use General Config/Advanced/Proxy host setting, simplify and PEP8

Commit 9e231a69bb: 6 changed files with 129 additions and 125 deletions
CHANGES.md
@@ -29,6 +29,7 @@
 * Change provider TD login process to use General Config/Advanced/Proxy host setting
 * Change provider BTS login process to use General Config/Advanced/Proxy host setting
 * Change provider FSH login process to use General Config/Advanced/Proxy host setting
+* Change provider RSS torrent code to use General Config/Advanced/Proxy host setting, simplify and PEP8
 * Change provider Wombles's PEP8 and code convention cleanup
 * Change provider Womble's use SSL
 * Change provider KAT remove dead url
sickbeard/__init__.py
@@ -1830,7 +1830,7 @@ def save_config():
     new_config['Newznab']['newznab_data'] = NEWZNAB_DATA

     new_config['TorrentRss'] = {}
-    new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.configStr() for x in torrentRssProviderList])
+    new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.config_str() for x in torrentRssProviderList])

     new_config['GUI'] = {}
     new_config['GUI']['gui_name'] = GUI_NAME
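The renamed config_str() still serialises each RSS provider to a pipe-delimited string, and save_config() joins one such string per provider with '!!!'. A minimal sketch of that round trip, not SickGear code, with hypothetical feed values:

# The two configs below are made up; the eight fields mirror the order in
# config_str(): name|url|cookies|enabled|search_mode|fallback|recent|backlog.
configs = ['Feed One|https://example.org/rss|uid=1;pass=2|1|eponly|0|0|0',
           'Feed Two|https://example.net/rss||1|eponly|0|0|0']
torrentrss_data = '!!!'.join(configs)

# Splitting on the same tokens recovers each provider's fields.
for config in torrentrss_data.split('!!!'):
    name, url, cookies, enabled, mode, fallback, recent, backlog = config.split('|')
    print('%s -> %s' % (name, url))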
sickbeard/providers/rsstorrent.py
@@ -15,35 +15,38 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

-import os
 import re

-import sickbeard
-import generic
-from sickbeard import helpers, logger, tvcache
-from sickbeard import encodingKludge as ek
+from . import generic
+from sickbeard import logger, tvcache
+from sickbeard.rssfeeds import RSSFeeds
 from sickbeard.exceptions import ex
-from lib import requests
 from lib.bencode import bdecode


 class TorrentRssProvider(generic.TorrentProvider):
-    def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False, enable_recentsearch=False,
-                 enable_backlog=False):
-        generic.TorrentProvider.__init__(self, name, False, False)
-        self.cache = TorrentRssCache(self)
-        self.url = re.sub('\/$', '', url)
-        self.url = url
-        self.ratio = None
-        self.search_mode = search_mode
-        self.search_fallback = search_fallback
-        self.enable_recentsearch = enable_recentsearch
-        self.enable_backlog = enable_backlog
+    def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False,
+                 enable_recentsearch=False, enable_backlog=False):
+        generic.TorrentProvider.__init__(self, name)
+
+        self.url = url.rstrip('/')
         self.cookies = cookies
+        self.enable_recentsearch = enable_recentsearch
+        self.enable_backlog = enable_backlog
+        self.search_mode = search_mode
+        self.search_fallback = search_fallback
+
+        self.feeder = RSSFeeds(self)
+        self.cache = TorrentRssCache(self)

-    def configStr(self):
-        return "%s|%s|%s|%d|%s|%d|%d|%d" % (self.name or '',
+    def imageName(self):
+
+        return generic.GenericProvider.imageName(self, 'torrentrss')
+
+    def config_str(self):
+        return '%s|%s|%s|%d|%s|%d|%d|%d' % (self.name or '',
                                             self.url or '',
                                             self.cookies or '',
                                             self.enabled,
@@ -52,22 +55,12 @@ class TorrentRssProvider(generic.TorrentProvider):
                                             self.enable_recentsearch,
                                             self.enable_backlog)

-    def imageName(self):
-        if ek.ek(os.path.isfile,
-                 ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers',
-                       self.getID() + '.png')):
-            return self.getID() + '.png'
-        return 'torrentrss.png'
-
     def _get_title_and_url(self, item):

         title, url = None, None

-        title = item.title
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
+        if item.title:
+            title = re.sub(r'\s+', '.', u'' + item.title)

         attempt_list = [lambda: item.torrent_magneturi,
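The one-line title clean-up above does slightly more than the code it replaces: re.sub(r'\s+', '.', ...) collapses any run of whitespace, tabs included, where replace(' ', '.') only swapped single spaces. A small standalone illustration with a made-up title:

import re

# Two spaces and a tab in the raw feed title.
title = u'Some  Show\tS01E02 720p'
print(re.sub(r'\s+', '.', title))  # Some.Show.S01E02.720p
print(title.replace(' ', '.'))     # double dot and the tab survive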
@@ -82,83 +75,58 @@ class TorrentRssProvider(generic.TorrentProvider):
                 continue

             if title and url:
-                return (title, url)
+                break

-        return (title, url)
+        return title, url

-    def validateRSS(self):
+    def validate_feed(self):
+
+        succ, err_msg = self.feeder.check_cookie()
+        if not succ:
+            return succ, err_msg

         try:
-            if self.cookies:
-                cookie_validator = re.compile("^(\w+=\w+)(;\w+=\w+)*$")
-                if not cookie_validator.match(self.cookies):
-                    return (False, 'Cookie is not correctly formatted: ' + self.cookies)
-
-            items = self.cache._getRSSData()
-
-            if not len(items) > 0:
-                return (False, 'No items found in the RSS feed ' + self.url)
-
-            (title, url) = self._get_title_and_url(items[0])
-
-            if not title:
-                return (False, 'Unable to get title from first item')
-
-            if not url:
-                return (False, 'Unable to get torrent url from first item')
-
-            if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url):
-                return (True, 'RSS feed Parsed correctly')
-            else:
-                if self.cookies:
-                    requests.utils.add_dict_to_cookiejar(self.session.cookies,
-                                                         dict(x.rsplit('=', 1) for x in (self.cookies.split(';'))))
-                torrent_file = self.getURL(url)
-                try:
-                    bdecode(torrent_file)
-                except Exception as e:
-                    self.dumpHTML(torrent_file)
-                    return (False, 'Torrent link is not a valid torrent file: ' + ex(e))
-
-            return (True, 'RSS feed Parsed correctly')
+            items = self.get_cache_data()
+
+            for item in items:
+                title, url = self._get_title_and_url(item)
+                if not (title and url):
+                    continue
+                if url.startswith('magnet:'):
+                    if re.search('urn:btih:([0-9a-f]{32,40})', url):
+                        break
+                else:
+                    torrent_file = self.getURL(url)
+                    try:
+                        bdecode(torrent_file)
+                        break
+                    except Exception:
+                        pass
+            else:
+                return False, '%s fetched RSS feed data: %s' % \
+                              (('Fail to validate', 'No items found in the')[0 == len(items)], self.url)
+
+            return True, None

         except Exception as e:
-            return (False, 'Error when trying to load RSS: ' + ex(e))
+            return False, 'Error when trying to load RSS: ' + ex(e)

-    def dumpHTML(self, data):
+    def get_cache_data(self):

-        dumpName = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html')
+        logger.log(u'TorrentRssCache cache update URL: ' + self.url, logger.DEBUG)

-        try:
-            fileOut = open(dumpName, 'wb')
-            fileOut.write(data)
-            fileOut.close()
-            helpers.chmodAsParent(dumpName)
-        except IOError as e:
-            logger.log("Unable to save the file: " + ex(e), logger.ERROR)
-            return False
-        logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE)
-        return True
+        data = self.feeder.get_feed(self.url)

-    def seedRatio(self):
-        return self.ratio
+        return [] if not (data and 'entries' in data) else data.entries


 class TorrentRssCache(tvcache.TVCache):

     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)

         self.minTime = 15

     def _getRSSData(self):
-        logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)

-        request_headers = None
-        if self.provider.cookies:
-            request_headers = {'Cookie': self.provider.cookies}
-
-        data = self.getRSSFeed(self.provider.url, request_headers=request_headers)
-
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.provider.get_cache_data()
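The rewritten validate_feed leans on two Python idioms that are easy to misread: a for/else, whose else block runs only when the loop ends without a break (here, when no item validated), and a bool-indexed tuple that picks the error message. A standalone sketch of the same control flow, with made-up items and URL:

# Neither item carries a btih hash, so the loop never breaks and else runs.
items = ['magnet:?xt=bad', 'not-a-torrent']
for item in items:
    if 'urn:btih:' in item:
        break  # a usable item would end the search here
else:
    # False indexes element 0 of the tuple, True element 1
    print('%s fetched RSS feed data: %s'
          % (('Fail to validate', 'No items found in the')[0 == len(items)],
             'https://example.org/rss'))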
sickbeard/rssfeeds.py
@@ -1,26 +1,66 @@
-import urllib
-import urlparse
-import re
-from feedparser import feedparser
-import sickbeard
+# coding=utf-8
+#
+# This file is part of SickGear.
+#

-from sickbeard import logger
+import re
+import requests
+import requests.cookies
+from feedparser import feedparser
+
+from sickbeard import helpers, logger
 from sickbeard.exceptions import ex

-def getFeed(url, post_data=None, request_headers=None):
-    parsed = list(urlparse.urlparse(url))
-    parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one

-    if post_data:
-        url += urllib.urlencode(post_data)
+class RSSFeeds:
+
+    def __init__(self, provider=None):
+
+        self.provider = provider
+
+    def _check_auth_cookie(self):
+
+        if self.provider and hasattr(self.provider, 'cookies'):
+            cookies = self.provider.cookies
+
+            if not re.match('^(\w+=\w+[;\s]*)+$', cookies):
+                return False
+
+            cj = requests.utils.add_dict_to_cookiejar(self.provider.session.cookies,
+                                                      dict([x.strip().split('=') for x in cookies.split(';')
+                                                            if x != ''])),
+            for item in cj:
+                if not isinstance(item, requests.cookies.RequestsCookieJar):
+                    return False
+
+        return True
+
+    def check_cookie(self):
+
+        if self._check_auth_cookie():
+            return True, None
+
+        return False, 'Cookies not correctly formatted key=value pairs e.g. uid=xx;pass=yy): ' + self.provider.cookies
+
+    def get_feed(self, url, request_headers=None):
+
+        if not self._check_auth_cookie():
+            return
+
+        session = None
+        if self.provider and hasattr(self.provider, 'session'):
+            session = self.provider.session
+
+        response = helpers.getURL(url, headers=request_headers, session=session)
+        if not response:
+            return

-    try:
-        feed = feedparser.parse(url, False, False, request_headers)
-
-        if feed:
-            if 'entries' in feed:
+        try:
+            feed = feedparser.parse(response)
+            if feed and 'entries' in feed:
                 return feed
-            elif 'error' in feed.feed:
+
+            if feed and 'error' in feed.feed:
                 err_code = feed.feed['error']['code']
                 err_desc = feed.feed['error']['description']
                 logger.log(u'RSS ERROR:[%s] CODE:[%s]' % (err_desc, err_code), logger.DEBUG)
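RSSFeeds._check_auth_cookie validates the raw cookie string before handing it to requests: it must look like semicolon-separated key=value pairs, and is then split into the dict that requests.utils.add_dict_to_cookiejar expects. A sketch of just that parsing step, with made-up cookie values:

import re

cookies = 'uid=1234; pass=567890'
if re.match(r'^(\w+=\w+[;\s]*)+$', cookies):
    # same split the new class performs before filling the cookiejar
    pairs = dict([x.strip().split('=') for x in cookies.split(';') if x != ''])
    print(pairs)  # {'uid': '1234', 'pass': '567890'}
else:
    print('Cookies not correctly formatted key=value pairs')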
sickbeard/tvcache.py
@@ -29,7 +29,7 @@ from sickbeard.common import Quality
 from sickbeard import helpers, show_name_helpers
 from sickbeard.exceptions import AuthException, ex
 from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-from sickbeard.rssfeeds import getFeed
+from sickbeard.rssfeeds import RSSFeeds
 import itertools

 class CacheDBConnection(db.DBConnection):
@@ -44,7 +44,7 @@ class CacheDBConnection(db.DBConnection):
         if str(e) != 'table lastUpdate already exists':
             raise

-class TVCache():
+class TVCache:
     def __init__(self, provider):

         self.provider = provider
@@ -107,8 +107,8 @@ class TVCache():

         return []

-    def getRSSFeed(self, url, post_data=None, request_headers=None):
-        return getFeed(url, post_data, request_headers)
+    def getRSSFeed(self, url):
+        return RSSFeeds(self.provider).get_feed(url)

     def _translateTitle(self, title):
         return u'' + title.replace(' ', '.')
@ -133,7 +133,6 @@ class TVCache():
|
||||||
logger.DEBUG)
|
logger.DEBUG)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def _getLastUpdate(self):
|
def _getLastUpdate(self):
|
||||||
myDB = self._getDB()
|
myDB = self._getDB()
|
||||||
sqlResults = myDB.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID])
|
sqlResults = myDB.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID])
|
||||||
|
@ -160,7 +159,6 @@ class TVCache():
|
||||||
|
|
||||||
return datetime.datetime.fromtimestamp(lastTime)
|
return datetime.datetime.fromtimestamp(lastTime)
|
||||||
|
|
||||||
|
|
||||||
def setLastUpdate(self, toDate=None):
|
def setLastUpdate(self, toDate=None):
|
||||||
if not toDate:
|
if not toDate:
|
||||||
toDate = datetime.datetime.today()
|
toDate = datetime.datetime.today()
|
||||||
|
@ -250,7 +248,6 @@ class TVCache():
|
||||||
'INSERT OR IGNORE INTO provider_cache (provider, name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?,?)',
|
'INSERT OR IGNORE INTO provider_cache (provider, name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?,?)',
|
||||||
[self.providerID, name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
|
[self.providerID, name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
|
||||||
|
|
||||||
|
|
||||||
def searchCache(self, episode, manualSearch=False):
|
def searchCache(self, episode, manualSearch=False):
|
||||||
neededEps = self.findNeededEpisodes(episode, manualSearch)
|
neededEps = self.findNeededEpisodes(episode, manualSearch)
|
||||||
if len(neededEps) > 0:
|
if len(neededEps) > 0:
|
||||||
|
@ -267,7 +264,6 @@ class TVCache():
|
||||||
|
|
||||||
return filter(lambda x: x['indexerid'] != 0, myDB.select(sql, [self.providerID]))
|
return filter(lambda x: x['indexerid'] != 0, myDB.select(sql, [self.providerID]))
|
||||||
|
|
||||||
|
|
||||||
def findNeededEpisodes(self, episode, manualSearch=False):
|
def findNeededEpisodes(self, episode, manualSearch=False):
|
||||||
neededEps = {}
|
neededEps = {}
|
||||||
cl = []
|
cl = []
|
||||||
|
@ -280,8 +276,8 @@ class TVCache():
|
||||||
else:
|
else:
|
||||||
for epObj in episode:
|
for epObj in episode:
|
||||||
cl.append([
|
cl.append([
|
||||||
'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ? '
|
'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ?'
|
||||||
'AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
|
+ ' AND episodes LIKE ? AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
|
||||||
[self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
|
[self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])
|
||||||
sqlResults = myDB.mass_action(cl)
|
sqlResults = myDB.mass_action(cl)
|
||||||
if sqlResults:
|
if sqlResults:
|
||||||
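The reshuffled query keeps the same matching trick: as the '%|' + str(epObj.episode) + '|%' parameter suggests, episode numbers are stored pipe-delimited, so an episodes LIKE '%|n|%' match finds episode n without false hits from longer numbers. A runnable sketch against a trimmed-down, hypothetical version of the schema:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE provider_cache (name TEXT, episodes TEXT)')
conn.execute("INSERT INTO provider_cache VALUES ('Show.S01E02', '|1|2|3|')")
conn.execute("INSERT INTO provider_cache VALUES ('Show.S01E12', '|12|')")
rows = conn.execute('SELECT name FROM provider_cache WHERE episodes LIKE ?',
                    ['%|' + str(2) + '|%']).fetchall()
print(rows)  # [('Show.S01E02',)] and not the '|12|' row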
@@ -354,4 +350,3 @@ class TVCache():
         self.setLastSearch()

-
         return neededEps
sickbeard/webserve.py
@@ -4014,7 +4014,7 @@ class ConfigProviders(Config):
             if tempProvider.getID() in providerDict:
                 return json.dumps({'error': 'Exists as ' + providerDict[tempProvider.getID()].name})
             else:
-                (succ, errMsg) = tempProvider.validateRSS()
+                (succ, errMsg) = tempProvider.validate_feed()
                 if succ:
                     return json.dumps({'success': tempProvider.getID()})
                 else:
@@ -4032,12 +4032,12 @@ class ConfigProviders(Config):
             providerDict[name].url = config.clean_url(url)
             providerDict[name].cookies = cookies

-            return providerDict[name].getID() + '|' + providerDict[name].configStr()
+            return providerDict[name].getID() + '|' + providerDict[name].config_str()

         else:
             newProvider = rsstorrent.TorrentRssProvider(name, url, cookies)
             sickbeard.torrentRssProviderList.append(newProvider)
-            return newProvider.getID() + '|' + newProvider.configStr()
+            return newProvider.getID() + '|' + newProvider.config_str()

     def deleteTorrentRssProvider(self, id):