diff --git a/gui/slick/images/providers/bitsoup.png b/gui/slick/images/providers/bitsoup.png
new file mode 100644
index 00000000..8d7c5eb1
Binary files /dev/null and b/gui/slick/images/providers/bitsoup.png differ
diff --git a/gui/slick/images/providers/freshontv.png b/gui/slick/images/providers/freshontv.png
new file mode 100755
index 00000000..9b15d1b7
Binary files /dev/null and b/gui/slick/images/providers/freshontv.png differ
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
old mode 100644
new mode 100755
index 914249ab..1aafdab8
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -32,7 +32,7 @@ import sys
from sickbeard import providers, metadata, config, webserveInit
from sickbeard.providers.generic import GenericProvider
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
- omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb
+ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, freshontv, bitsoup
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
naming_ep_type
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
old mode 100644
new mode 100755
index 47e52dcd..0076a0cf
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -34,7 +34,9 @@ __all__ = ['ezrss',
'nyaatorrents',
'fanzub',
'torrentbytes',
- 'animezb'
+ 'animezb',
+ 'freshontv',
+ 'bitsoup'
]
import sickbeard
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
new file mode 100644
index 00000000..eb27452d
--- /dev/null
+++ b/sickbeard/providers/bitsoup.py
@@ -0,0 +1,342 @@
+# Author: Idan Gutman
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+import datetime
+import urlparse
+import time
+import sickbeard
+import generic
+from sickbeard.common import Quality, cpu_presets
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import db
+from sickbeard import classes
+from sickbeard import helpers
+from sickbeard import show_name_helpers
+from sickbeard.common import Overview
+from sickbeard.exceptions import ex
+from sickbeard import clients
+from lib import requests
+from lib.requests import exceptions
+from bs4 import BeautifulSoup
+from lib.unidecode import unidecode
+from sickbeard.helpers import sanitizeSceneName
+
+
+class BitSoupProvider(generic.TorrentProvider):
+ urls = {'base_url': 'https://www.bitsoup.me',
+ 'login': 'https://www.bitsoup.me/takelogin.php',
+ 'detail': 'https://www.bitsoup.me/details.php?id=%s',
+ 'search': 'https://www.bitsoup.me/browse.php?search=%s%s',
+ 'download': 'https://bitsoup.me/%s',
+ }
+
+ def __init__(self):
+
+ generic.TorrentProvider.__init__(self, "BitSoup")
+
+ self.supportsBacklog = True
+
+ self.enabled = False
+ self.username = None
+ self.password = None
+ self.ratio = None
+ self.minseed = None
+ self.minleech = None
+
+ self.cache = BitSoupCache(self)
+
+ self.url = self.urls['base_url']
+
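+ # category filter fragment appended to every browse query (site-specific TV category IDs, presumably)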
+ self.categories = "&c42=1&c45=1&c49=1&c7=1"
+
+ def isEnabled(self):
+ return self.enabled
+
+ def imageName(self):
+ return 'bitsoup.png'
+
+ def getQuality(self, item, anime=False):
+
+ quality = Quality.sceneQuality(item[0], anime)
+ return quality
+
+ def _doLogin(self):
+
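+ # POST the configured credentials to takelogin.php and scan the response for the site's failure message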
+ login_params = {'username': self.username,
+ 'password': self.password,
+ 'ssl': 'yes'
+ }
+
+ self.session = requests.Session()
+
+ try:
+ response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+ except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+ logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+ return False
+
+ if re.search('Username or password incorrect', response.text):
+ logger.log(u'Invalid username or password for ' + self.name + '. Check your settings.', logger.ERROR)
+ return False
+
+ return True
+
+ def _get_season_search_strings(self, ep_obj):
+
+ search_string = {'Season': []}
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
+ elif ep_obj.show.anime:
+ ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
+ else:
+ ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
+
+ search_string['Season'].append(ep_string)
+
+ return [search_string]
+
+ def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+ search_string = {'Episode': []}
+
+ if not ep_obj:
+ return []
+
+ if self.show.air_by_date:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', '|')
+ search_string['Episode'].append(ep_string)
+ elif self.show.sports:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', '|') + '|' + \
+ ep_obj.airdate.strftime('%b')
+ search_string['Episode'].append(ep_string)
+ elif self.show.anime:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ "%i" % int(ep_obj.scene_absolute_number)
+ search_string['Episode'].append(ep_string)
+ else:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+ sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+ 'episodenumber': ep_obj.scene_episode}
+
+ search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
+
+ return [search_string]
+
+ def _doSearch(self, search_params, epcount=0, age=0):
+
+ results = []
+ items = {'Season': [], 'Episode': [], 'RSS': []}
+
+ if not self._doLogin():
+ return []
+
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+
+ if isinstance(search_string, unicode):
+ search_string = unidecode(search_string)
+
+ searchURL = self.urls['search'] % (search_string, self.categories)
+
+ logger.log(u"Search string: " + searchURL, logger.DEBUG)
+
+ data = self.getURL(searchURL)
+ if not data:
+ continue
+
+ try:
+ html = BeautifulSoup(data, "html.parser")
+
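+ # results are listed in the table with class 'koptekst'; the first row is the column header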
+ torrent_table = html.find('table', attrs={'class': 'koptekst'})
+ torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+
+ #Continue only if one Release is found
+ if len(torrent_rows) < 2:
+ logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+ logger.DEBUG)
+ continue
+
+ for result in torrent_rows[1:]:
+ cells = result.find_all('td')
+
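+ # column layout assumed here: cell 1 = details link, cell 3 = download link, cells 9/10 = seeders/leechers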
+ link = cells[1].find('a')
+ download_url = self.urls['download'] % cells[3].find('a')['href']
+
+ id = link['href'].replace('details.php?id=', '').replace('&hit=1', '')
+
+ try:
+ title = link.getText()
+ id = int(id)
+ seeders = int(cells[9].getText())
+ leechers = int(cells[10].getText())
+ except (AttributeError, TypeError):
+ continue
+
+ #Filter unseeded torrent
+ if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+ continue
+
+ if not title or not download_url:
+ continue
+
+ item = title, download_url, id, seeders, leechers
+ logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+
+ items[mode].append(item)
+
+ except Exception, e:
+ logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+
+ #For each search mode sort all the items by seeders
+ items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
+ results += items[mode]
+
+ return results
+
+ def _get_title_and_url(self, item):
+
+ title, url, id, seeders, leechers = item
+
+ if url:
+ url = str(url).replace('&amp;', '&')
+
+ return (title, url)
+
+ def getURL(self, url, post_data=None, headers=None, json=False):
+
+ if not self.session:
+ self._doLogin()
+
+ if not headers:
+ headers = {}
+
+ try:
+ # Remove double-slashes from url
+ parsed = list(urlparse.urlparse(url))
+ parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
+ url = urlparse.urlunparse(parsed)
+
+ response = self.session.get(url, verify=False)
+ except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+ logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
+ return None
+
+ if response.status_code != 200:
+ logger.log(self.name + u" page requested with url " + url + " returned status code " + str(
+ response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
+ return None
+
+ return response.content
+
+ def findPropers(self, search_date=None):
+
+ # a default argument would be evaluated once at import time, so resolve "today" per call
+ if not search_date:
+ search_date = datetime.datetime.today()
+
+ results = []
+
+ myDB = db.DBConnection()
+ sqlResults = myDB.select(
+ 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+ ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+ ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+ ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+ ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+ )
+
+ if not sqlResults:
+ return []
+
+ for sqlshow in sqlResults:
+ self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+ if self.show:
+ curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+
+ searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+ for item in self._doSearch(searchString[0]):
+ title, url = self._get_title_and_url(item)
+ results.append(classes.Proper(title, url, datetime.datetime.today()))
+
+ return results
+
+ def seedRatio(self):
+ return self.ratio
+
+
+class BitSoupCache(tvcache.TVCache):
+ def __init__(self, provider):
+
+ tvcache.TVCache.__init__(self, provider)
+
+ # only poll BitSoup every 20 minutes at most
+ self.minTime = 20
+
+ def updateCache(self):
+
+ # delete anything older than 7 days
+ logger.log(u"Clearing " + self.provider.name + " cache")
+ self._clearCache()
+
+ if not self.shouldUpdate():
+ return
+
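+ # an empty search string in RSS mode fetches the provider's default browse listing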
+ search_params = {'RSS': ['']}
+ rss_results = self.provider._doSearch(search_params)
+
+ if rss_results:
+ self.setLastUpdate()
+ else:
+ return []
+
+ cl = []
+ for result in rss_results:
+
+ item = (result[0], result[1])
+ ci = self._parseItem(item)
+ if ci is not None:
+ cl.append(ci)
+
+ if cl:
+ myDB = self._getDB()
+ myDB.mass_action(cl)
+
+
+ def _parseItem(self, item):
+
+ (title, url) = item
+
+ if not title or not url:
+ return None
+
+ logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
+
+ return self._addCacheEntry(title, url)
+
+
+provider = BitSoupProvider()
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
new file mode 100755
index 00000000..de740e2b
--- /dev/null
+++ b/sickbeard/providers/freshontv.py
@@ -0,0 +1,375 @@
+# Author: Idan Gutman
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+import datetime
+import urlparse
+import time
+import sickbeard
+import generic
+from sickbeard.common import Quality, cpu_presets
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import db
+from sickbeard import classes
+from sickbeard import helpers
+from sickbeard import show_name_helpers
+from sickbeard.common import Overview
+from sickbeard.exceptions import ex
+from sickbeard import clients
+from lib import requests
+from lib.requests import exceptions
+from bs4 import BeautifulSoup
+from lib.unidecode import unidecode
+from sickbeard.helpers import sanitizeSceneName
+
+
+class FreshOnTVProvider(generic.TorrentProvider):
+ urls = {'base_url': 'http://freshon.tv/',
+ 'login': 'http://freshon.tv/login.php?action=makelogin',
+ 'detail': 'http://freshon.tv/details.php?id=%s',
+ 'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
+ 'download': 'http://freshon.tv/download.php?id=%s&type=torrent',
+ }
+
+ def __init__(self):
+
+ generic.TorrentProvider.__init__(self, "FreshOnTV")
+
+ self.supportsBacklog = True
+
+ self.enabled = False
+ self._uid = None
+ self._hash = None
+ self.username = None
+ self.password = None
+ self.ratio = None
+ self.minseed = None
+ self.minleech = None
+ self.freeleech = False
+
+ self.cache = FreshOnTVCache(self)
+
+ self.url = self.urls['base_url']
+ self.cookies = None
+
+ def isEnabled(self):
+ return self.enabled
+
+ def imageName(self):
+ return 'freshontv.png'
+
+ def getQuality(self, item, anime=False):
+
+ quality = Quality.sceneQuality(item[0], anime)
+ return quality
+
+ def _doLogin(self):
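+ # log in once, then reuse the captured 'uid'/'pass' cookies on later calls instead of re-posting credentials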
+ if not self.session:
+ self.session = requests.Session()
+
+ if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
+ return True
+
+ if self._uid and self._hash:
+
+ requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
+
+ else:
+ login_params = {'username': self.username,
+ 'password': self.password,
+ 'login': 'submit'
+ }
+
+
+ try:
+ response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+ except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+ logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+ return False
+
+ if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response.text):
+ logger.log(u'Invalid username or password for ' + self.name + '. Check your settings.', logger.ERROR)
+ return False
+
+ cookies = requests.utils.dict_from_cookiejar(self.session.cookies)
+ if cookies.get('uid') and cookies.get('pass'):
+ self._uid = cookies['uid']
+ self._hash = cookies['pass']
+
+ self.cookies = {'uid': self._uid,
+ 'pass': self._hash
+ }
+ return True
+ else:
+ logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
+ return False
+
+ def _get_season_search_strings(self, ep_obj):
+
+ search_string = {'Season': []}
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
+ elif ep_obj.show.anime:
+ ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+ else:
+ ep_string = show_name + '.S%02d' % int(ep_obj.scene_season) #1) showName SXX
+
+ search_string['Season'].append(ep_string)
+
+ return [search_string]
+
+ def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+ search_string = {'Episode': []}
+
+ if not ep_obj:
+ return []
+
+ if self.show.air_by_date:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', '|')
+ search_string['Episode'].append(ep_string)
+ elif self.show.sports:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', '|') + '|' + \
+ ep_obj.airdate.strftime('%b')
+ search_string['Episode'].append(ep_string)
+ elif self.show.anime:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ "%i" % int(ep_obj.scene_absolute_number)
+ search_string['Episode'].append(ep_string)
+ else:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+ sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+ 'episodenumber': ep_obj.scene_episode}
+
+ search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
+
+ return [search_string]
+
+ def _doSearch(self, search_params, epcount=0, age=0):
+
+ results = []
+ items = {'Season': [], 'Episode': [], 'RSS': []}
+
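+ # incldead=3 appears to restrict browse results to freeleech torrents; 0 applies no filter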
+ freeleech = '3' if self.freeleech else '0'
+
+ if not self._doLogin():
+ return []
+
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+
+ if isinstance(search_string, unicode):
+ search_string = unidecode(search_string)
+
+ searchURL = self.urls['search'] % (freeleech, search_string)
+
+ logger.log(u"Search string: " + searchURL, logger.DEBUG)
+
+ # returns top 15 results by default, expandable in user profile to 100
+ data = self.getURL(searchURL)
+ if not data:
+ continue
+
+ try:
+ html = BeautifulSoup(data, features=["html5lib", "permissive"])
+
+ torrent_table = html.find('table', attrs={'class': 'frame'})
+ torrent_rows = torrent_table.findChildren('tr') if torrent_table else []
+ #Continue only if one Release is found
+ if len(torrent_rows) < 2:
+ logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+ logger.DEBUG)
+ continue
+
+ # skip colheader
+ for result in torrent_rows[1:]:
+ cells = result.findChildren('td')
+
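+ # column layout assumed here: cell 1 = name/details link, cell 8 = seeders, cell 9 = leechers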
+ link = cells[1].find('a', attrs={'class': 'torrent_name_link'})
+ # skip torrents that have been nuked for poor quality
+ if cells[1].find('img', alt='Nuked') is not None:
+ continue
+
+ torrent_id = link['href'].replace('/details.php?id=', '')
+
+ try:
+ if link.has_attr('title'):
+ title = link['title']
+ else:
+ title = link.contents[0]
+ download_url = self.urls['download'] % torrent_id
+ id = int(torrent_id)
+
+ seeders = int(cells[8].find('a', {'class': 'link'}).span.contents[0].strip())
+ leechers = int(cells[9].find('a', {'class': 'link'}).contents[0].strip())
+ except (AttributeError, TypeError):
+ continue
+
+ #Filter unseeded torrent
+ if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+ continue
+
+ if not title or not download_url:
+ continue
+
+ item = title, download_url, id, seeders, leechers
+ logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+
+ items[mode].append(item)
+
+ except Exception, e:
+ logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+
+ #For each search mode sort all the items by seeders
+ items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
+ results += items[mode]
+
+ return results
+
+ def _get_title_and_url(self, item):
+
+ title, url, id, seeders, leechers = item
+
+ if url:
+ url = str(url).replace('&amp;', '&')
+
+ return (title, url)
+
+ def getURL(self, url, post_data=None, headers=None, json=False):
+
+ if not self.session:
+ self._doLogin()
+
+ if not headers:
+ headers = {}
+
+ try:
+ # Remove double-slashes from url
+ parsed = list(urlparse.urlparse(url))
+ parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
+ url = urlparse.urlunparse(parsed)
+
+ response = self.session.get(url, verify=False)
+ except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+ logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
+ return None
+
+ if response.status_code != 200:
+ logger.log(self.name + u" page requested with url " + url + " returned status code " + str(
+ response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
+ return None
+
+ return response.content
+
+ def findPropers(self, search_date=None):
+
+ # a default argument would be evaluated once at import time, so resolve "today" per call
+ if not search_date:
+ search_date = datetime.datetime.today()
+
+ results = []
+
+ myDB = db.DBConnection()
+ sqlResults = myDB.select(
+ 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+ ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+ ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+ ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+ ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+ )
+
+ if not sqlResults:
+ return []
+
+ for sqlshow in sqlResults:
+ self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+ if self.show:
+ curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+
+ searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+ for item in self._doSearch(searchString[0]):
+ title, url = self._get_title_and_url(item)
+ results.append(classes.Proper(title, url, datetime.datetime.today()))
+
+ return results
+
+ def seedRatio(self):
+ return self.ratio
+
+
+class FreshOnTVCache(tvcache.TVCache):
+ def __init__(self, provider):
+
+ tvcache.TVCache.__init__(self, provider)
+
+ # poll delay in minutes
+ self.minTime = 20
+
+ def updateCache(self):
+
+ # delete anything older than 7 days
+ logger.log(u"Clearing " + self.provider.name + " cache")
+ self._clearCache()
+
+ if not self.shouldUpdate():
+ return
+
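+ # an empty search string in RSS mode fetches the provider's default browse listing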
+ search_params = {'RSS': ['']}
+ rss_results = self.provider._doSearch(search_params)
+
+ if rss_results:
+ self.setLastUpdate()
+ else:
+ return []
+
+ cl = []
+ for result in rss_results:
+
+ item = (result[0], result[1])
+ ci = self._parseItem(item)
+ if ci is not None:
+ cl.append(ci)
+
+ if cl:
+ myDB = self._getDB()
+ myDB.mass_action(cl)
+
+
+ def _parseItem(self, item):
+
+ (title, url) = item
+
+ if not title or not url:
+ return None
+
+ logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
+
+ return self._addCacheEntry(title, url)
+
+
+provider = FreshOnTVProvider()
\ No newline at end of file