SickGear/sickbeard/classes.py

# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from collections import OrderedDict
from sickbeard.common import Quality
from unidecode import unidecode
import datetime
import os
import re
import sickbeard


class SearchResult(object):
    """
    Represents a search result from an indexer.
    """

    def __init__(self, episodes):
        self.provider = -1
        # release show object
        self.show = None

        # URL to the NZB/torrent file
        self.url = ''

        # used by some providers to store extra info associated with the result
        self.extraInfo = []

        # assign function to get the data for the download
        self.get_data_func = None

        # list of TVEpisode objects that this result is associated with
        self.episodes = episodes

        # quality of the release
        self.quality = Quality.UNKNOWN

        # release name
        self.name = ''

        # size of the release (-1 = n/a)
        self.size = -1

        # release group
        self.release_group = ''

        # version
        self.version = -1

        # proper level
        self._properlevel = 0

        # is a repack
        self.is_repack = False

        # provider unique id
        self.puid = None

    @property
    def properlevel(self):
        return self._properlevel

    @properlevel.setter
    def properlevel(self, v):
        if isinstance(v, (int, long)):
            self._properlevel = v

    def __str__(self):
        if self.provider is None:
            return 'Invalid provider, unable to print self'

        return '\n'.join([
            '%s @ %s' % (self.provider.name, self.url),
            'Extra Info:',
            '\n'.join([' %s' % x for x in self.extraInfo]),
            'Episode: %s' % self.episodes,
            'Quality: %s' % Quality.qualityStrings[self.quality],
            'Name: %s' % self.name,
            'Size: %s' % self.size,
            'Release Group: %s' % self.release_group])

    def get_data(self):
        if None is not self.get_data_func:
            try:
                return self.get_data_func(self.url)
            except (StandardError, Exception):
                pass
        if self.extraInfo and 0 < len(self.extraInfo):
            return self.extraInfo[0]
        return None
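
# Illustrative only (not executed): a rough sketch of how a provider typically
# fills in and consumes a result. The provider object, URL and release name
# below are hypothetical placeholders, not values defined by this module.
#
#   result = SearchResult([ep_obj])            # ep_obj: a TVEpisode instance
#   result.provider = some_provider            # hypothetical provider instance
#   result.url = 'https://example.org/some.nzb'
#   result.name = 'Show.Name.S01E01.720p.HDTV.x264-GRP'
#   result.quality = Quality.HDTV
#   data = result.get_data()   # uses get_data_func if set, else falls back to extraInfo[0]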


class NZBSearchResult(SearchResult):
    """
    Regular NZB result with a URL to the NZB
    """
    resultType = 'nzb'


class NZBDataSearchResult(SearchResult):
    """
    NZB result where the actual NZB XML data is stored in the extraInfo
    """
    resultType = 'nzbdata'


class TorrentSearchResult(SearchResult):
    """
    Torrent result with a URL to the torrent
    """
    resultType = 'torrent'

    # torrent hash
    content = None
    hash = None


class ShowFilter(object):
    def __init__(self, config, log=None):
        self.config = config
        self.log = log
        self.bad_names = [re.compile('(?i)%s' % r) for r in (
            '[*]+\s*(?:403:|do not add|dupli[^s]+\s*(?:\d+|<a\s|[*])|inval)',
            '(?:inval|not? allow(ed)?)(?:[,\s]*period)?\s*[*]',
            '[*]+\s*dupli[^\s*]+\s*[*]+\s*(?:\d+|<a\s)',
            '\s(?:dupli[^s]+\s*(?:\d+|<a\s|[*]))'
        )]

    def _is_bad_name(self, show):
        return isinstance(show, dict) and 'seriesname' in show and isinstance(show['seriesname'], (str, unicode)) \
            and any([x.search(show['seriesname']) for x in self.bad_names])

    @staticmethod
    def _fix_firstaired(show):
        if 'firstaired' not in show:
            show['firstaired'] = '1900-01-01'

    @staticmethod
    def _dict_prevent_none(d, key, default):
        v = None
        if isinstance(d, dict):
            v = d.get(key, default)
        return (v, default)[None is v]

    @staticmethod
    def _fix_seriesname(show):
        if isinstance(show, dict) and 'seriesname' in show and isinstance(show['seriesname'], (str, unicode)):
            show['seriesname'] = ShowFilter._dict_prevent_none(show, 'seriesname', '').strip()
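
# Illustrative only (not executed): the kind of placeholder "series" names that
# _is_bad_name is meant to reject. The sample strings below are hypothetical.
#
#   sf = ShowFilter(config={})
#   sf._is_bad_name({'seriesname': '** 403: Series Not Permitted **'})   # -> True
#   sf._is_bad_name({'seriesname': '** Duplicate of 12345 **'})          # -> True
#   sf._is_bad_name({'seriesname': 'An Ordinary Show Title'})            # -> False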


class AllShowsNoFilterListUI(ShowFilter):
    """
    This class is for the indexer api. Used for searching; no filter or smart select.
    """

    def __init__(self, config, log=None):
        super(AllShowsNoFilterListUI, self).__init__(config, log)

    def select_series(self, all_series):
        search_results = []

        # get all available shows
        if all_series:
            for cur_show in all_series:
                self._fix_seriesname(cur_show)
                if cur_show in search_results or self._is_bad_name(cur_show):
                    continue
                self._fix_firstaired(cur_show)
                if cur_show not in search_results:
                    search_results += [cur_show]

        return search_results


class AllShowsListUI(ShowFilter):
    """
    This class is for the indexer api. Instead of prompting with a UI to pick the
    desired result out of a list of shows, it tries to be smart about it
    based on what shows are in SB.
    """

    def __init__(self, config, log=None):
        super(AllShowsListUI, self).__init__(config, log)

    def select_series(self, all_series):
        search_results = []

        # get all available shows
        if all_series:
            search_term = self.config.get('searchterm', '').strip().lower()
            if search_term:
                # try to pick a show that's in my show list
                for cur_show in all_series:
                    self._fix_seriesname(cur_show)
                    if cur_show in search_results or self._is_bad_name(cur_show):
                        continue

                    seriesnames = []
                    if 'seriesname' in cur_show:
                        name = cur_show['seriesname'].lower()
                        seriesnames += [name, unidecode(name.encode('utf-8').decode('utf-8'))]
                    if 'aliases' in cur_show:
                        if isinstance(cur_show['aliases'], list):
                            for a in cur_show['aliases']:
                                name = a.strip().lower()
                                seriesnames += [name, unidecode(name.encode('utf-8').decode('utf-8'))]
                        elif isinstance(cur_show['aliases'], (str, unicode)):
                            name = cur_show['aliases'].strip().lower()
                            seriesnames += name.split('|') + unidecode(name.encode('utf-8').decode('utf-8')).split('|')

                    if search_term in set(seriesnames):
                        self._fix_firstaired(cur_show)
                        if cur_show not in search_results:
                            search_results += [cur_show]

        return search_results
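
# Illustrative only (not executed): with a 'searchterm' in the config dict,
# select_series keeps only entries whose seriesname or aliases match the term
# exactly (after lower-casing/unidecode). The sample data below is hypothetical.
#
#   ui = AllShowsListUI({'searchterm': 'My Show'})
#   ui.select_series([{'seriesname': 'My Show', 'aliases': ['Mi Serie']},
#                     {'seriesname': 'Another Show'}])
#   # -> [{'seriesname': 'My Show', 'aliases': ['Mi Serie'], 'firstaired': '1900-01-01'}]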


class ShowListUI(ShowFilter):
    """
    This class is for tvdb-api. Instead of prompting with a UI to pick the
    desired result out of a list of shows, it tries to be smart about it
    based on what shows are in SB.
    """

    def __init__(self, config, log=None):
        super(ShowListUI, self).__init__(config, log)

    def select_series(self, all_series):
        try:
            # try to pick a show that's in my show list
            for curShow in all_series:
                self._fix_seriesname(curShow)
                if self._is_bad_name(curShow):
                    continue
                if filter(lambda x: int(x.indexerid) == int(curShow['id']), sickbeard.showList):
                    return curShow
        except (StandardError, Exception):
            pass

        # if nothing matches then return first result
        return all_series[0]


class Proper:
    def __init__(self, name, url, date, show, parsed_show=None, size=-1, puid=None):
        self.name = name
        self.url = url
        self.date = date
        self.size = size
        self.puid = puid
        self.provider = None
        self.quality = Quality.UNKNOWN
        self.release_group = None
        self.version = -1
        self.parsed_show = parsed_show
        self.show = show
        self.indexer = None
        self.indexerid = -1
        self.season = -1
        self.episode = -1
        self.scene_season = -1
        self.scene_episode = -1

    def __str__(self):
        return str(self.date) + ' ' + self.name + ' ' + str(self.season) + 'x' + str(self.episode) + ' of ' + str(
            self.indexerid) + ' from ' + str(sickbeard.indexerApi(self.indexer).name)


class ErrorViewer:
    """
    Keeps a static list of UIErrors to be displayed on the UI and allows
    the list to be cleared.
    """
    errors = []

    def __init__(self):
        ErrorViewer.errors = []

    @staticmethod
    def add(error):
        ErrorViewer.errors.append(error)

    @staticmethod
    def clear():
        ErrorViewer.errors = []


class UIError:
    """
    Represents an error to be displayed in the web UI.
    """

    def __init__(self, message):
        self.message = message
        self.time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
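
# Illustrative only (not executed): errors are queued for the web UI like so.
#
#   ErrorViewer.add(UIError(u'Unable to contact provider'))
#   ErrorViewer.errors    # -> list of UIError objects, each with .message and .time
#   ErrorViewer.clear()   # empties the list once the UI has displayed them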


class OrderedDefaultdict(OrderedDict):
    def __init__(self, *args, **kwargs):
        if not args:
            self.default_factory = None
        else:
            if not (args[0] is None or callable(args[0])):
                raise TypeError('first argument must be callable or None')
            self.default_factory = args[0]
            args = args[1:]
        super(OrderedDefaultdict, self).__init__(*args, **kwargs)

    def __missing__(self, key):
        if self.default_factory is None:
            raise KeyError(key)
        self[key] = default = self.default_factory()
        return default

    def __reduce__(self):  # optional, for pickle support
        args = (self.default_factory,) if self.default_factory else ()
        return self.__class__, args, None, None, self.iteritems()

    # backport from python 3
    def move_to_end(self, key, last=True):
        """Move an existing element to the end (or beginning if last==False).

        Raises KeyError if the element does not exist.
        When last=True, acts like a fast version of self[key]=self.pop(key).
        """
        link_prev, link_next, key = link = self._OrderedDict__map[key]
        link_prev[1] = link_next
        link_next[0] = link_prev
        root = self._OrderedDict__root
        if last:
            last = root[0]
            link[0] = last
            link[1] = root
            last[1] = root[0] = link
        else:
            first = root[1]
            link[0] = root
            link[1] = first
            root[1] = first[0] = link

    def first_key(self):
        return self._OrderedDict__root[1][2]

    def last_key(self):
        return self._OrderedDict__root[0][2]
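
# Illustrative only (not executed): behaves like collections.defaultdict but
# remembers insertion order; move_to_end mirrors the python 3 OrderedDict api.
#
#   d = OrderedDefaultdict(list)
#   d['a'] += [1]          # missing key -> default_factory() supplies []
#   d['b'] += [2]
#   d.move_to_end('a')     # order is now 'b', 'a'
#   d.first_key(), d.last_key()   # -> ('b', 'a')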


class ImageUrlList(list):
    def __init__(self, max_age=30):
        super(ImageUrlList, self).__init__()
        self.max_age = max_age

    def add_url(self, url):
        self.remove_old()
        cache_item = (url, datetime.datetime.now())
        for n, x in enumerate(self):
            if self._is_cache_item(x) and url == x[0]:
                self[n] = cache_item
                return
        self.append(cache_item)

    @staticmethod
    def _is_cache_item(item):
        return isinstance(item, (tuple, list)) and 2 == len(item)

    def remove_old(self):
        age_limit = datetime.datetime.now() - datetime.timedelta(minutes=self.max_age)
        self[:] = [x for x in self if self._is_cache_item(x) and age_limit < x[1]]

    def __repr__(self):
        return str([x[0] for x in self if self._is_cache_item(x)])

    def __contains__(self, url):
        for x in self:
            if self._is_cache_item(x) and url == x[0]:
                return True
        return False

    def remove(self, url):
        for x in self:
            if self._is_cache_item(x) and url == x[0]:
                super(ImageUrlList, self).remove(x)
                break
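
# Illustrative only (not executed): a small age-limited cache of image URLs.
# Entries older than max_age minutes are dropped whenever a URL is added.
# The URL below is a hypothetical placeholder.
#
#   recent = ImageUrlList(max_age=30)
#   recent.add_url('https://example.org/banner.jpg')
#   'https://example.org/banner.jpg' in recent   # -> True; stale entries are pruned on the next add_url
#   recent.remove('https://example.org/banner.jpg')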


if 'nt' == os.name:
    import ctypes

    class WinEnv:
        def __init__(self):
            pass

        @staticmethod
        def get_environment_variable(name):
            name = unicode(name)  # ensures string argument is unicode
            n = ctypes.windll.kernel32.GetEnvironmentVariableW(name, None, 0)
            result = None
            if n:
                buf = ctypes.create_unicode_buffer(u'\0' * n)
                ctypes.windll.kernel32.GetEnvironmentVariableW(name, buf, n)
                result = buf.value
            return result

        def __getitem__(self, key):
            return self.get_environment_variable(key)

        def get(self, key, default=None):
            r = self.get_environment_variable(key)
            return r if r is not None else default

    sickbeard.ENV = WinEnv()
else:
    class LinuxEnv(object):
        def __init__(self, environ):
            self.environ = environ

        def __getitem__(self, key):
            v = self.environ.get(key)
            try:
                # decode byte strings using the system encoding tracked by the sickbeard package
                return v.decode(sickbeard.SYS_ENCODING) if isinstance(v, str) else v
            except (UnicodeDecodeError, UnicodeEncodeError):
                return v

        def get(self, key, default=None):
            v = self[key]
            return v if v is not None else default

    sickbeard.ENV = LinuxEnv(os.environ)
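
# Illustrative only (not executed): both wrappers expose the same minimal
# mapping interface, so callers can read environment variables uniformly.
#
#   home = sickbeard.ENV.get('HOME', u'')   # value, or the default when unset
#   temp = sickbeard.ENV['TEMP']            # value, or None when the variable is unset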