Merge pull request #448 from JackDandy/feature/ChangeProvRSS

Change provider RSS torrent code to use General Config/Advanced/Proxy…
JackDandy 2015-07-03 17:09:39 +01:00
commit 9e231a69bb
6 changed files with 129 additions and 125 deletions

CHANGES.md

@@ -29,6 +29,7 @@
 * Change provider TD login process to use General Config/Advanced/Proxy host setting
 * Change provider BTS login process to use General Config/Advanced/Proxy host setting
 * Change provider FSH login process to use General Config/Advanced/Proxy host setting
+* Change provider RSS torrent code to use General Config/Advanced/Proxy host setting, simplify and PEP8
 * Change provider Womble's PEP8 and code convention cleanup
 * Change provider Womble's to use SSL
 * Change provider KAT to remove dead url
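
The entries above all describe routing provider traffic through the proxy host configured under General Config/Advanced. As a minimal sketch of the idea, assuming sickbeard.PROXY_SETTING holds that setting (the fetch() helper below is illustrative, not SickGear's actual implementation):

    import requests
    import sickbeard

    def fetch(url, **kwargs):
        # When a proxy host is configured, route both http and https
        # requests through it; otherwise connect directly.
        proxies = None
        if sickbeard.PROXY_SETTING:
            proxies = {'http': sickbeard.PROXY_SETTING, 'https': sickbeard.PROXY_SETTING}
        return requests.get(url, proxies=proxies, timeout=30, **kwargs)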

sickbeard/__init__.py

@@ -1830,7 +1830,7 @@ def save_config():
     new_config['Newznab']['newznab_data'] = NEWZNAB_DATA

     new_config['TorrentRss'] = {}
-    new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.configStr() for x in torrentRssProviderList])
+    new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.config_str() for x in torrentRssProviderList])

     new_config['GUI'] = {}
     new_config['GUI']['gui_name'] = GUI_NAME
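
For reference, config_str() (renamed from configStr) emits one '|'-delimited record per provider, and save_config joins the records with '!!!' before writing them to config.ini. A sketch of the round trip, using made-up provider values:

    # one serialised record as it would appear in config.ini
    torrentrss_data = 'Example RSS|https://example.com/rss|uid=1;pass=2|1|eponly|0|0|0'

    # splitting mirrors the field order used by config_str()
    for record in torrentrss_data.split('!!!'):
        (name, url, cookies, enabled, search_mode,
         search_fallback, enable_recentsearch, enable_backlog) = record.split('|')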

sickbeard/providers/rsstorrent.py

@@ -15,35 +15,38 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

-import os
 import re
-import sickbeard
-import generic
-from sickbeard import helpers, logger, tvcache
-from sickbeard import encodingKludge as ek
+from . import generic
+from sickbeard import logger, tvcache
+from sickbeard.rssfeeds import RSSFeeds
 from sickbeard.exceptions import ex
-from lib import requests
 from lib.bencode import bdecode


 class TorrentRssProvider(generic.TorrentProvider):

-    def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False, enable_recentsearch=False,
-                 enable_backlog=False):
-        generic.TorrentProvider.__init__(self, name, False, False)
-        self.cache = TorrentRssCache(self)
-        self.url = re.sub('\/$', '', url)
-        self.url = url
-        self.ratio = None
-        self.search_mode = search_mode
-        self.search_fallback = search_fallback
-        self.enable_recentsearch = enable_recentsearch
-        self.enable_backlog = enable_backlog
+    def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False,
+                 enable_recentsearch=False, enable_backlog=False):
+        generic.TorrentProvider.__init__(self, name)
+        self.url = url.rstrip('/')
         self.cookies = cookies
+        self.enable_recentsearch = enable_recentsearch
+        self.enable_backlog = enable_backlog
+        self.search_mode = search_mode
+        self.search_fallback = search_fallback
+        self.feeder = RSSFeeds(self)
+        self.cache = TorrentRssCache(self)

-    def configStr(self):
-        return "%s|%s|%s|%d|%s|%d|%d|%d" % (self.name or '',
+    def imageName(self):
+        return generic.GenericProvider.imageName(self, 'torrentrss')

+    def config_str(self):
+        return '%s|%s|%s|%d|%s|%d|%d|%d' % (self.name or '',
                                             self.url or '',
                                             self.cookies or '',
                                             self.enabled,
@@ -52,22 +55,12 @@ class TorrentRssProvider(generic.TorrentProvider):
                                             self.enable_recentsearch,
                                             self.enable_backlog)

-    def imageName(self):
-        if ek.ek(os.path.isfile,
-                 ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers',
-                       self.getID() + '.png')):
-            return self.getID() + '.png'
-        return 'torrentrss.png'

     def _get_title_and_url(self, item):

         title, url = None, None

-        title = item.title
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
+        if item.title:
+            title = re.sub(r'\s+', '.', u'' + item.title)

         attempt_list = [lambda: item.torrent_magneturi,
@@ -82,83 +75,58 @@ class TorrentRssProvider(generic.TorrentProvider):
                 continue

             if title and url:
-                return (title, url)
+                break

-        return (title, url)
+        return title, url

-    def validateRSS(self):
+    def validate_feed(self):

+        succ, err_msg = self.feeder.check_cookie()
+        if not succ:
+            return succ, err_msg

         try:
-            if self.cookies:
-                cookie_validator = re.compile("^(\w+=\w+)(;\w+=\w+)*$")
-                if not cookie_validator.match(self.cookies):
-                    return (False, 'Cookie is not correctly formatted: ' + self.cookies)

-            items = self.cache._getRSSData()
+            items = self.get_cache_data()

-            if not len(items) > 0:
-                return (False, 'No items found in the RSS feed ' + self.url)

-            (title, url) = self._get_title_and_url(items[0])

-            if not title:
-                return (False, 'Unable to get title from first item')

-            if not url:
-                return (False, 'Unable to get torrent url from first item')

-            if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url):
-                return (True, 'RSS feed Parsed correctly')
-            else:
-                if self.cookies:
-                    requests.utils.add_dict_to_cookiejar(self.session.cookies,
-                                                         dict(x.rsplit('=', 1) for x in (self.cookies.split(';'))))

-                torrent_file = self.getURL(url)
-                try:
-                    bdecode(torrent_file)
-                except Exception as e:
-                    self.dumpHTML(torrent_file)
-                    return (False, 'Torrent link is not a valid torrent file: ' + ex(e))
+            for item in items:
+                title, url = self._get_title_and_url(item)
+                if not (title and url):
+                    continue
+                if url.startswith('magnet:'):
+                    if re.search('urn:btih:([0-9a-f]{32,40})', url):
+                        break
+                else:
+                    torrent_file = self.getURL(url)
+                    try:
+                        bdecode(torrent_file)
+                    except Exception:
+                        pass
+                    else:
+                        break
+            else:
+                return False, '%s fetched RSS feed data: %s' % \
+                       (('Fail to validate', 'No items found in the')[0 == len(items)], self.url)

-            return (True, 'RSS feed Parsed correctly')
+            return True, None

         except Exception as e:
-            return (False, 'Error when trying to load RSS: ' + ex(e))
+            return False, 'Error when trying to load RSS: ' + ex(e)

-    def dumpHTML(self, data):
-        dumpName = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html')
-        try:
-            fileOut = open(dumpName, 'wb')
-            fileOut.write(data)
-            fileOut.close()
-            helpers.chmodAsParent(dumpName)
-        except IOError as e:
-            logger.log("Unable to save the file: " + ex(e), logger.ERROR)
-            return False
-        logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE)
-        return True

-    def seedRatio(self):
-        return self.ratio
+    def get_cache_data(self):

+        logger.log(u'TorrentRssCache cache update URL: ' + self.url, logger.DEBUG)

+        data = self.feeder.get_feed(self.url)

+        return [] if not (data and 'entries' in data) else data.entries


 class TorrentRssCache(tvcache.TVCache):
     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)
         self.minTime = 15

     def _getRSSData(self):
-        logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)

-        request_headers = None
-        if self.provider.cookies:
-            request_headers = {'Cookie': self.provider.cookies}

-        data = self.getRSSFeed(self.provider.url, request_headers=request_headers)
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.provider.get_cache_data()
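
The rewritten validate_feed() accepts the feed once any item yields either a magnet link with a plausible info-hash or a torrent URL whose payload bdecodes. The magnet branch in isolation, as a standalone sketch:

    import re

    def is_valid_magnet(url):
        # same test as validate_feed(): a urn:btih info-hash of
        # 32 to 40 characters drawn from [0-9a-f]
        return bool(url.startswith('magnet:') and re.search(r'urn:btih:([0-9a-f]{32,40})', url))

    assert is_valid_magnet('magnet:?xt=urn:btih:' + 'c' * 40)
    assert not is_valid_magnet('https://example.com/file.torrent')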

sickbeard/rssfeeds.py

@@ -1,26 +1,66 @@
-import urllib
-import urlparse
-import re
-from feedparser import feedparser
-import sickbeard
-from sickbeard import logger
+# coding=utf-8
+#
+# This file is part of SickGear.
+#
+import re

+import requests
+import requests.cookies
+from feedparser import feedparser

+from sickbeard import helpers, logger
 from sickbeard.exceptions import ex


-def getFeed(url, post_data=None, request_headers=None):
-    parsed = list(urlparse.urlparse(url))
-    parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one

-    if post_data:
-        url += urllib.urlencode(post_data)

+class RSSFeeds:

+    def __init__(self, provider=None):
+        self.provider = provider

+    def _check_auth_cookie(self):

+        if self.provider and hasattr(self.provider, 'cookies'):
+            cookies = self.provider.cookies
+            if not re.match('^(\w+=\w+[;\s]*)+$', cookies):
+                return False

+            cj = requests.utils.add_dict_to_cookiejar(self.provider.session.cookies,
+                                                      dict([x.strip().split('=') for x in cookies.split(';')
+                                                            if x != ''])),
+            for item in cj:
+                if not isinstance(item, requests.cookies.RequestsCookieJar):
+                    return False

+        return True

+    def check_cookie(self):

+        if self._check_auth_cookie():
+            return True, None

+        return False, 'Cookies not correctly formatted key=value pairs e.g. uid=xx;pass=yy): ' + self.provider.cookies

+    def get_feed(self, url, request_headers=None):

+        if not self._check_auth_cookie():
+            return

+        session = None
+        if self.provider and hasattr(self.provider, 'session'):
+            session = self.provider.session

+        response = helpers.getURL(url, headers=request_headers, session=session)
+        if not response:
+            return

         try:
-            feed = feedparser.parse(url, False, False, request_headers)
-            if feed:
-                if 'entries' in feed:
-                    return feed
-                elif 'error' in feed.feed:
+            feed = feedparser.parse(response)
+            if feed and 'entries' in feed:
+                return feed

+            if feed and 'error' in feed.feed:
                 err_code = feed.feed['error']['code']
                 err_desc = feed.feed['error']['description']
                 logger.log(u'RSS ERROR:[%s] CODE:[%s]' % (err_desc, err_code), logger.DEBUG)
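
The validation in _check_auth_cookie() boils down to requiring one or more semicolon-separated key=value pairs. A standalone illustration of the same pattern:

    import re

    def cookies_ok(cookies):
        # the pattern used by _check_auth_cookie() above
        return bool(re.match(r'^(\w+=\w+[;\s]*)+$', cookies))

    assert cookies_ok('uid=123;pass=abcdef')
    assert not cookies_ok('not a cookie string')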

sickbeard/tvcache.py

@@ -29,7 +29,7 @@ from sickbeard.common import Quality
 from sickbeard import helpers, show_name_helpers
 from sickbeard.exceptions import AuthException, ex
 from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-from sickbeard.rssfeeds import getFeed
+from sickbeard.rssfeeds import RSSFeeds
 import itertools


 class CacheDBConnection(db.DBConnection):
@@ -44,7 +44,7 @@ class CacheDBConnection(db.DBConnection):
             if str(e) != 'table lastUpdate already exists':
                 raise


-class TVCache():
+class TVCache:
     def __init__(self, provider):
         self.provider = provider
@@ -107,8 +107,8 @@ class TVCache():
         return []

-    def getRSSFeed(self, url, post_data=None, request_headers=None):
-        return getFeed(url, post_data, request_headers)
+    def getRSSFeed(self, url):
+        return RSSFeeds(self.provider).get_feed(url)

     def _translateTitle(self, title):
         return u'' + title.replace(' ', '.')
@@ -133,7 +133,6 @@ class TVCache():
                        logger.DEBUG)

         return None

-
     def _getLastUpdate(self):
         myDB = self._getDB()
         sqlResults = myDB.select('SELECT time FROM lastUpdate WHERE provider = ?', [self.providerID])
@@ -160,7 +159,6 @@ class TVCache():
         return datetime.datetime.fromtimestamp(lastTime)

-
     def setLastUpdate(self, toDate=None):
         if not toDate:
             toDate = datetime.datetime.today()
@@ -250,7 +248,6 @@ class TVCache():
             'INSERT OR IGNORE INTO provider_cache (provider, name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?,?)',
             [self.providerID, name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]

-
     def searchCache(self, episode, manualSearch=False):
         neededEps = self.findNeededEpisodes(episode, manualSearch)
         if len(neededEps) > 0:
@@ -267,7 +264,6 @@ class TVCache():
         return filter(lambda x: x['indexerid'] != 0, myDB.select(sql, [self.providerID]))

-
     def findNeededEpisodes(self, episode, manualSearch=False):
         neededEps = {}
         cl = []
@@ -280,8 +276,8 @@ class TVCache():
         else:
             for epObj in episode:
                 cl.append([
-                    'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ? '
-                    'AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
+                    'SELECT * FROM provider_cache WHERE provider = ? AND indexerid = ? AND season = ?'
+                    + ' AND episodes LIKE ? AND quality IN (' + ','.join([str(x) for x in epObj.wantedQuality]) + ')',
                     [self.providerID, epObj.show.indexerid, epObj.season, '%|' + str(epObj.episode) + '|%']])

         sqlResults = myDB.mass_action(cl)
         if sqlResults:
@@ -354,4 +350,3 @@ class TVCache():
         self.setLastSearch()

         return neededEps
-
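
A side note for readers of findNeededEpisodes(): the episodes column evidently stores a pipe-delimited list, hence the '%|' + episode + '|%' LIKE pattern, which prevents a search for episode 2 from matching '12'. The same logic in plain Python:

    def episode_in_cache_row(episodes_field, episode):
        # equivalent to: episodes LIKE '%|<episode>|%'
        return '|%d|' % episode in episodes_field

    assert episode_in_cache_row('|1|2|12|', 2)
    assert not episode_in_cache_row('|12|', 2)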

sickbeard/webserve.py

@@ -4014,7 +4014,7 @@ class ConfigProviders(Config):
         if tempProvider.getID() in providerDict:
             return json.dumps({'error': 'Exists as ' + providerDict[tempProvider.getID()].name})
         else:
-            (succ, errMsg) = tempProvider.validateRSS()
+            (succ, errMsg) = tempProvider.validate_feed()
             if succ:
                 return json.dumps({'success': tempProvider.getID()})
             else:
@@ -4032,12 +4032,12 @@ class ConfigProviders(Config):
             providerDict[name].url = config.clean_url(url)
             providerDict[name].cookies = cookies

-            return providerDict[name].getID() + '|' + providerDict[name].configStr()
+            return providerDict[name].getID() + '|' + providerDict[name].config_str()

         else:
             newProvider = rsstorrent.TorrentRssProvider(name, url, cookies)
             sickbeard.torrentRssProviderList.append(newProvider)

-            return newProvider.getID() + '|' + newProvider.configStr()
+            return newProvider.getID() + '|' + newProvider.config_str()

     def deleteTorrentRssProvider(self, id):
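
Both return paths above hand the web UI a single pipe-joined string: the provider id followed by its config_str() record. With the illustrative values from earlier, the result would resemble the following (the id slug is assumed here, derived from the provider name):

    payload = 'example_rss' + '|' + 'Example RSS|https://example.com/rss|uid=1;pass=2|1|eponly|0|0|0'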