# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os
import traceback
import urllib
import re
import datetime
import urlparse

import sickbeard
import generic
from sickbeard.common import Quality, mediaExtensions
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard import logger, tvcache, helpers, db, classes
from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode

class KATProvider(generic.TorrentProvider):
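    """Torrent provider for the KickAssTorrents (KAT) index.

    Queries kat.ph, falling back to the katproxy.com mirror, and returns
    results sorted by seeder count for SickGear's search pipeline.
    """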

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'KickAssTorrents', True, False)

        self.confirmed = False
        self.ratio = None
        self.minseed = None
        self.minleech = None

        self.urls = ['https://kat.ph/', 'http://katproxy.com/']
        self.url = self.urls[0]

        self.cache = KATCache(self)

    def getQuality(self, item, anime=False):
        quality = Quality.sceneQuality(item[0], anime)
        return quality
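
    # _reverse_quality maps a Quality constant back to a representative
    # release-name fragment, so that a season torrent can be re-titled with
    # the quality detected from its file list (see _find_season_quality).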
    @staticmethod
    def _reverse_quality(quality):
        quality_string = ''

        if quality == Quality.SDTV:
            quality_string = 'HDTV x264'
        elif quality == Quality.SDDVD:
            quality_string = 'DVDRIP'
        elif quality == Quality.HDTV:
            quality_string = '720p HDTV x264'
        elif quality == Quality.FULLHDTV:
            quality_string = '1080p HDTV x264'
        elif quality == Quality.RAWHDTV:
            quality_string = '1080i HDTV mpeg2'
        elif quality == Quality.HDWEBDL:
            quality_string = '720p WEB-DL h264'
        elif quality == Quality.FULLHDWEBDL:
            quality_string = '1080p WEB-DL h264'
        elif quality == Quality.HDBLURAY:
            quality_string = '720p Bluray x264'
        elif quality == Quality.FULLHDBLURAY:
            quality_string = '1080p Bluray x264'

        return quality_string

    def _find_season_quality(self, title, torrent_link, ep_number):
        """Return the season torrent's title, re-tagged with the quality found by inspecting its file list."""

        quality = Quality.UNKNOWN
        file_name = None

        data = self.getURL(torrent_link)
        if not data:
            return None

        try:
            with BS4Parser(data, features=['html5lib', 'permissive']) as soup:
                file_table = soup.find('table', attrs={'class': 'torrentFileList'})

                if not file_table:
                    return None

                files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
                video_files = [f for f in files if f.rpartition('.')[2].lower() in mediaExtensions]

                # Reject single-episode and multi-season torrents: only accept a torrent whose
                # video file count is between ep_number and ep_number * 1.1
                if len(video_files) < ep_number or len(video_files) > float(ep_number * 1.1):
                    logger.log(u'Result %s expected %s episodes but the torrent lists %s video files'
                               % (title, ep_number, len(video_files)), logger.DEBUG)
                    logger.log(u'Result %s seems to be a single episode or multi-season torrent, skipping result...'
                               % title, logger.DEBUG)
                    return None

                if Quality.UNKNOWN != Quality.sceneQuality(title):
                    return title

                for file_name in video_files:
                    quality = Quality.sceneQuality(os.path.basename(file_name))
                    if Quality.UNKNOWN != quality:
                        break

                if None is not file_name and Quality.UNKNOWN == quality:
                    quality = Quality.assumeQuality(os.path.basename(file_name))

                if Quality.UNKNOWN == quality:
                    logger.log(u'Unable to obtain a Season Quality for ' + title, logger.DEBUG)
                    return None

                try:
                    my_parser = NameParser(showObj=self.show)
                    parse_result = my_parser.parse(file_name)
                except (InvalidNameException, InvalidShowException):
                    return None

                logger.log(u'Season quality for %s is %s' % (title, Quality.qualityStrings[quality]), logger.DEBUG)

                if parse_result.series_name and parse_result.season_number:
                    title = parse_result.series_name + ' S%02d %s' % (int(parse_result.season_number),
                                                                      self._reverse_quality(quality))
                return title

        except Exception:
            logger.log(u'Failed to quality parse ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
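
    # Season search strings come in three shapes depending on the show type:
    #   air-by-date / sports: '<show> <year>' and '<show> Season <year>'
    #   anime:                '<show> <2-digit absolute episode number>'
    #   everything else:      '<show> SXX -SXXE category:tv' and
    #                         '<show> Season X -Ep* category:tv'
    # (the '-' terms exclude single-episode results from a season search)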
    def _get_season_search_strings(self, ep_obj):
        search_string = {'Season': []}

        for show_name in set(allPossibleShowNames(self.show)):
            if ep_obj.show.air_by_date or ep_obj.show.sports:
                ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
                search_string['Season'].append(ep_string)
                ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0]
                search_string['Season'].append(ep_string)
            elif ep_obj.show.anime:
                ep_string = show_name + ' ' + '%02d' % ep_obj.scene_absolute_number
                search_string['Season'].append(ep_string)
            else:
                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) \
                    + ' -S%02dE' % int(ep_obj.scene_season) + ' category:tv'  # 1) showName SXX -SXXE
                search_string['Season'].append(ep_string)
                ep_string = show_name + ' Season ' + str(ep_obj.scene_season) \
                    + ' -Ep*' + ' category:tv'  # 2) showName Season X
                search_string['Season'].append(ep_string)

        return [search_string]
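
    # Episode search strings follow the same show-type split as above; for
    # regular shows, two alternative episode-numbering patterns are joined
    # with '|' so KAT matches either naming convention.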
    def _get_episode_search_strings(self, ep_obj, add_string=''):
        search_string = {'Episode': []}

        if self.show.air_by_date:
            for show_name in set(allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                    str(ep_obj.airdate).replace('-', ' ')
                search_string['Episode'].append(ep_string)
        elif self.show.sports:
            for show_name in set(allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                    str(ep_obj.airdate).replace('-', '|') + '|' + \
                    ep_obj.airdate.strftime('%b')
                search_string['Episode'].append(ep_string)
        elif self.show.anime:
            for show_name in set(allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                    '%02i' % int(ep_obj.scene_absolute_number)
                search_string['Episode'].append(ep_string)
        else:
            for show_name in set(allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                    sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
                                                          'episodenumber': ep_obj.scene_episode} + '|' + \
                    sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season,
                                                          'episodenumber': ep_obj.scene_episode} + \
                    ' %s category:tv' % add_string
                search_string['Episode'].append(re.sub(r'\s+', ' ', ep_string))

        return [search_string]
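
    # _doSearch tries every configured mirror until one responds, scrapes the
    # result table, filters rows by seeders/leechers and (optionally) verified
    # status, and returns (title, url, id, seeders, leechers) tuples sorted by
    # seeder count.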
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_params.keys():
            for search_string in search_params[mode]:

                html = None
                for url in self.urls:
                    search_url = url
                    if 'RSS' == mode:
                        search_url += 'tv/?field=time_add&sorder=desc'
                        logger.log(u'KAT cache update URL: ' + search_url, logger.DEBUG)
                    else:
                        search_url += 'usearch/%s/?field=seeders&sorder=desc' % urllib.quote(unidecode(search_string))
                        logger.log(u'Search string: ' + search_url, logger.DEBUG)

                    html = self.getURL(search_url)
                    if html:
                        self.url = url
                        break

                if not html:
                    continue

                try:
                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', attrs={'class': 'data'})
                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                        # Continue only if at least one release is found
                        if len(torrent_rows) < 2:
                            logger.log(u'The data returned from ' + self.name + ' does not contain any torrents',
                                       logger.WARNING)
                            continue

                        for tr in torrent_rows[1:]:
                            try:
                                link = urlparse.urljoin(self.url,
                                                        (tr.find('div', {'class': 'torrentname'}).find_all('a')[1])['href'])
                                tid = tr.get('id')[-7:]
                                title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text \
                                    or (tr.find('div', {'class': 'torrentname'}).find_all('a')[2]).text
                                download_url = tr.find('a', 'imagnet')['href']  # magnet link
                                verified = True if tr.find('a', 'iverify') else False
                                # trusted = True if tr.find('img', {'alt': 'verified'}) else False
                                seeders = int(tr.find_all('td')[-2].text)
                                leechers = int(tr.find_all('td')[-1].text)
                            except (AttributeError, TypeError):
                                continue

                            if 'RSS' != mode and (seeders < self.minseed or leechers < self.minleech):
                                continue

                            if self.confirmed and not verified:
                                logger.log(u'KAT Provider found result ' + title
                                           + u' but it is not a verified result, ignoring it', logger.DEBUG)
                                continue

                            # For full-season torrents ('sponly' searches), check that the number of
                            # video files matches the episode count and find the real quality by
                            # inspecting the files in the torrent
                            if 'Season' == mode and 'sponly' == search_mode:
                                ep_number = int(epcount / len(set(allPossibleShowNames(self.show))))
                                title = self._find_season_quality(title, link, ep_number)

                            if not title or not download_url:
                                continue

                            item = title, download_url, tid, seeders, leechers
                            items[mode].append(item)

                except Exception:
                    logger.log(u'Failed to parse ' + self.name + ' Traceback: ' + traceback.format_exc(),
                               logger.ERROR)

            # For each search mode sort all the items by seeders
            items[mode].sort(key=lambda tup: tup[3], reverse=True)
            results += items[mode]

        return results

    def _get_title_and_url(self, item):
        title, url, tid, seeders, leechers = item

        if title:
            title = u'' + title.replace(' ', '.')

        if url:
            # un-escape HTML entities in the magnet link
            url = url.replace('&amp;', '&')

        return title, url
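
    # findPropers re-searches episodes downloaded or snatched since search_date
    # with 'PROPER|REPACK' appended, so replacement releases can be picked up.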
    def findPropers(self, search_date=None):
        results = []

        # avoid the stale default-argument pitfall: compute 'today' per call,
        # not once at import time
        if search_date is None:
            search_date = datetime.datetime.today()

        my_db = db.DBConnection()
        sql_results = my_db.select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
            ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
            ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
        )

        if not sql_results:
            return []

        for sqlshow in sql_results:
            self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow['showid']))
            if self.show:
                cur_ep = self.show.getEpisode(int(sqlshow['season']), int(sqlshow['episode']))

                search_string = self._get_episode_search_strings(cur_ep, add_string='PROPER|REPACK')

                for item in self._doSearch(search_string[0]):
                    title, url = self._get_title_and_url(item)
                    results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))

        return results

    def seedRatio(self):
        return self.ratio


class KATCache(tvcache.TVCache):
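    """Result cache that periodically pulls the newest TV torrents from KAT."""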

    def __init__(self, this_provider):
        tvcache.TVCache.__init__(self, this_provider)

        self.minTime = 20  # cache update frequency

    def _getRSSData(self):
        # an 'RSS' search maps to the 'newest TV torrents' listing in _doSearch
        search_params = {'RSS': ['rss']}
        return self.provider._doSearch(search_params)


provider = KATProvider()
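
# Minimal usage sketch (hypothetical; assumes a fully configured sickbeard
# environment with a show object available, which this module cannot set up
# on its own):
#
#   strings = provider._get_episode_search_strings(ep_obj)  # ep_obj: a TVEpisode
#   for item in provider._doSearch(strings[0], search_mode='eponly'):
#       title, url = provider._get_title_and_url(item)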