# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os.path
import datetime
import threading
import re
import glob
import stat
import traceback
import shutil

import sickbeard

import xml.etree.cElementTree as etree

from name_parser.parser import NameParser, InvalidNameException, InvalidShowException

from lib import subliminal

import fnmatch

try:
    from lib.send2trash import send2trash
except ImportError:
    pass

from lib.imdb import imdb

from sickbeard import db
from sickbeard import helpers, exceptions, logger, name_cache, indexermapper
from sickbeard.exceptions import ex
from sickbeard import image_cache
from sickbeard import notifiers
from sickbeard import postProcessor
from sickbeard import subtitles
from sickbeard import history
from sickbeard import network_timezones
from sickbeard.blackandwhitelist import BlackAndWhiteList
from sickbeard.indexermapper import del_mapping, save_mapping, MapStatus
from sickbeard.generic_queue import QueuePriorities
from sickbeard import encodingKludge as ek

from common import Quality, Overview, statusStrings
from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, \
    UNKNOWN, FAILED
from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, \
    NAMING_LIMITED_EXTEND_E_PREFIXED

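# returns a setter that updates the named attribute only when the value actually changes,
# and flags the owning object as dirty so it is known to need saving back to the DB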
def dirty_setter(attr_name):
    def wrapper(self, val):
        if getattr(self, attr_name) != val:
            setattr(self, attr_name, val)
            self.dirty = True

    return wrapper

class TVShow(object):

    def __init__(self, indexer, indexerid, lang=''):

        self._indexerid = int(indexerid)
        self._indexer = int(indexer)
        self._name = ''
        self._imdbid = ''
        self._network = ''
        self._genre = ''
        self._classification = ''
        self._runtime = 0
        self._imdb_info = {}
        self._quality = int(sickbeard.QUALITY_DEFAULT)
        self._flatten_folders = int(sickbeard.FLATTEN_FOLDERS_DEFAULT)
        self._status = ""
        self._airs = ""
        self._startyear = 0
        self._paused = 0
        self._air_by_date = 0
        self._subtitles = int(sickbeard.SUBTITLES_DEFAULT if sickbeard.SUBTITLES_DEFAULT else 0)
        self._dvdorder = 0
        self._archive_firstmatch = 0
        self._lang = lang
        self._last_update_indexer = 1
        self._sports = 0
        self._anime = 0
        self._scene = 0
        self._rls_ignore_words = ''
        self._rls_require_words = ''
        self._overview = ''
        self._tag = ''
        self._mapped_ids = {}

        self.dirty = True

        self._location = ''
        self.lock = threading.Lock()
        self.isDirGood = False
        self.episodes = {}
        self.nextaired = ''
        self.release_groups = None

        otherShow = helpers.findCertainShow(sickbeard.showList, self.indexerid)
        if otherShow != None:
            raise exceptions.MultipleShowObjectsException('Can\'t create a show if it already exists')

        self.loadFromDB()

    name = property(lambda self: self._name, dirty_setter('_name'))
    indexerid = property(lambda self: self._indexerid, dirty_setter('_indexerid'))
    indexer = property(lambda self: self._indexer, dirty_setter('_indexer'))
    # location = property(lambda self: self._location, dirty_setter('_location'))
    imdbid = property(lambda self: self._imdbid, dirty_setter('_imdbid'))
    network = property(lambda self: self._network, dirty_setter('_network'))
    genre = property(lambda self: self._genre, dirty_setter('_genre'))
    classification = property(lambda self: self._classification, dirty_setter('_classification'))
    runtime = property(lambda self: self._runtime, dirty_setter('_runtime'))
    imdb_info = property(lambda self: self._imdb_info, dirty_setter('_imdb_info'))
    quality = property(lambda self: self._quality, dirty_setter('_quality'))
    flatten_folders = property(lambda self: self._flatten_folders, dirty_setter('_flatten_folders'))
    status = property(lambda self: self._status, dirty_setter('_status'))
    airs = property(lambda self: self._airs, dirty_setter('_airs'))
    startyear = property(lambda self: self._startyear, dirty_setter('_startyear'))
    paused = property(lambda self: self._paused, dirty_setter('_paused'))
    air_by_date = property(lambda self: self._air_by_date, dirty_setter('_air_by_date'))
    subtitles = property(lambda self: self._subtitles, dirty_setter('_subtitles'))
    dvdorder = property(lambda self: self._dvdorder, dirty_setter('_dvdorder'))
    archive_firstmatch = property(lambda self: self._archive_firstmatch, dirty_setter('_archive_firstmatch'))
    lang = property(lambda self: self._lang, dirty_setter('_lang'))
    last_update_indexer = property(lambda self: self._last_update_indexer, dirty_setter('_last_update_indexer'))
    sports = property(lambda self: self._sports, dirty_setter('_sports'))
    anime = property(lambda self: self._anime, dirty_setter('_anime'))
    scene = property(lambda self: self._scene, dirty_setter('_scene'))
    rls_ignore_words = property(lambda self: self._rls_ignore_words, dirty_setter('_rls_ignore_words'))
    rls_require_words = property(lambda self: self._rls_require_words, dirty_setter('_rls_require_words'))
    overview = property(lambda self: self._overview, dirty_setter('_overview'))
    tag = property(lambda self: self._tag, dirty_setter('_tag'))

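    # mapped indexer ids are built lazily: the first read of .ids runs indexermapper.map_indexers_to_show()
    # under the show lock, and the setter only accepts a dict of well formed
    # {indexer: {'id': int, 'status': int, 'date': date}} entries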
    @property
    def ids(self):
        if not self._mapped_ids:
            acquired_lock = self.lock.acquire(False)
            if acquired_lock:
                try:
                    indexermapper.map_indexers_to_show(self)
                finally:
                    self.lock.release()
        return self._mapped_ids

    @ids.setter
    def ids(self, value):
        if isinstance(value, dict):
            for k, v in value.iteritems():
                if k not in sickbeard.indexermapper.indexer_list or not isinstance(v, dict) or \
                        not isinstance(v.get('id'), (int, long)) or not isinstance(v.get('status'), (int, long)) or \
                        v.get('status') not in indexermapper.MapStatus.allstatus or \
                        not isinstance(v.get('date'), datetime.date):
                    return
            self._mapped_ids = value

    @property
    def is_anime(self):
        if int(self.anime) > 0:
            return True
        else:
            return False

    @property
    def is_sports(self):
        if int(self.sports) > 0:
            return True
        else:
            return False

    @property
    def is_scene(self):
        if int(self.scene) > 0:
            return True
        else:
            return False

    def _getLocation(self):
        # no dir check needed if missing show dirs are created during post-processing
        if sickbeard.CREATE_MISSING_SHOW_DIRS:
            return self._location

        if ek.ek(os.path.isdir, self._location):
            return self._location
        else:
            raise exceptions.ShowDirNotFoundException('Show folder doesn\'t exist, you shouldn\'t be using it')

    def _setLocation(self, newLocation):
        logger.log('Setter sets location to %s' % newLocation, logger.DEBUG)
        # Don't validate dir if user wants to add shows without creating a dir
        if sickbeard.ADD_SHOWS_WO_DIR or ek.ek(os.path.isdir, newLocation):
            dirty_setter('_location')(self, newLocation)
            self._isDirGood = True
        else:
            raise exceptions.NoNFOException('Invalid folder for the show!')

    location = property(_getLocation, _setLocation)

    # delete references to anything that's not in the internal lists
    def flushEpisodes(self):

        for curSeason in self.episodes:
            for curEp in self.episodes[curSeason]:
                myEp = self.episodes[curSeason][curEp]
                self.episodes[curSeason][curEp] = None
                del myEp

    def getAllEpisodes(self, season=None, has_location=False):

        sql_selection = 'SELECT season, episode, '

        # subselection to detect multi-episodes early, share_location > 0
        sql_selection = sql_selection + '(SELECT COUNT(*) FROM tv_episodes WHERE showid = tve.showid AND season = tve.season AND location != "" AND location = tve.location AND episode != tve.episode) AS share_location '

        sql_selection = sql_selection + 'FROM tv_episodes tve WHERE showid = ' + str(self.indexerid)

        if season is not None:
            sql_selection = sql_selection + ' AND season = ' + str(season)

        if has_location:
            sql_selection = sql_selection + ' AND location != ""'

        # need ORDER episode ASC to rename multi-episodes in order S01E01-02
        sql_selection = sql_selection + ' ORDER BY season ASC, episode ASC'

        myDB = db.DBConnection()
        results = myDB.select(sql_selection)

        ep_list = []
        for cur_result in results:
            cur_ep = self.getEpisode(int(cur_result['season']), int(cur_result['episode']))
            if cur_ep:
                cur_ep.relatedEps = []
                if cur_ep.location:
                    # if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps
                    if cur_result['share_location'] > 0:
                        related_eps_result = myDB.select(
                            'SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC',
                            [self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode])
                        for cur_related_ep in related_eps_result:
                            related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
                            if related_ep not in cur_ep.relatedEps:
                                cur_ep.relatedEps.append(related_ep)
                ep_list.append(cur_ep)

        return ep_list

    def getEpisode(self, season=None, episode=None, file=None, noCreate=False, absolute_number=None, forceUpdate=False):

        # if we get an anime get the real season and episode
        if self.is_anime and absolute_number and not season and not episode:
            myDB = db.DBConnection()
            sql = 'SELECT * FROM tv_episodes WHERE showid = ? and absolute_number = ? and season != 0'
            sqlResults = myDB.select(sql, [self.indexerid, absolute_number])

            if len(sqlResults) == 1:
                episode = int(sqlResults[0]['episode'])
                season = int(sqlResults[0]['season'])
                logger.log(
                    'Found episode by absolute_number: %s which is %sx%s' % (absolute_number, season, episode),
                    logger.DEBUG)
            elif len(sqlResults) > 1:
                logger.log('Multiple entries for absolute number: %s in show: %s found.' %
                           (absolute_number, self.name), logger.ERROR)
                return None
            else:
                logger.log(
                    'No entries for absolute number: %s in show: %s found.' % (absolute_number, self.name), logger.DEBUG)
                return None

        if not season in self.episodes:
            self.episodes[season] = {}

        if not episode in self.episodes[season] or self.episodes[season][episode] is None:
            if noCreate:
                return None

            logger.log('%s: An object for episode %sx%s didn\'t exist in the cache, trying to create it' %
                       (self.indexerid, season, episode), logger.DEBUG)

            if file:
                ep = TVEpisode(self, season, episode, file)
            else:
                ep = TVEpisode(self, season, episode)

            if ep != None:
                self.episodes[season][episode] = ep

        return self.episodes[season][episode]

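    # decide whether this show is due for an indexer refresh: shows not marked 'Ended' are nearly always
    # due, while ended shows are refreshed on a sliding schedule (less often the longer ago the last
    # episode aired) and stop being refreshed once the last airdate is over update_days_limit days old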
    def should_update(self, update_date=datetime.date.today()):

        cur_indexerid = self.indexerid

        # In some situations self.status = None.. need to figure out where that is!
        if not self.status:
            self.status = ''
            logger.log('Status missing for showid: [%s] with status: [%s]' %
                       (cur_indexerid, self.status), logger.DEBUG)

        myDB = db.DBConnection()
        sql_result = myDB.mass_action(
            [['SELECT airdate FROM [tv_episodes] WHERE showid = ? AND season > "0" ORDER BY season DESC, episode DESC LIMIT 1', [cur_indexerid]],
             ['SELECT airdate FROM [tv_episodes] WHERE showid = ? AND season > "0" AND airdate > "1" ORDER BY airdate DESC LIMIT 1', [cur_indexerid]]])

        last_airdate_unknown = int(sql_result[0][0]['airdate']) <= 1 if sql_result and sql_result[0] else True

        last_airdate = datetime.date.fromordinal(sql_result[1][0]['airdate']) if sql_result and sql_result[1] else datetime.date.fromordinal(1)

        last_update_indexer = datetime.date.fromordinal(self.last_update_indexer)

        # if show is not 'Ended' and last episode aired less than update_days_limit days ago or we don't have an airdate for the last episode, always update (status 'Continuing' or '')
        update_days_limit = 2013
        ended_limit = datetime.timedelta(days=update_days_limit)
        if 'Ended' not in self.status and (last_airdate == datetime.date.fromordinal(1) or last_airdate_unknown or (update_date - last_airdate) <= ended_limit or (update_date - last_update_indexer) > ended_limit):
            return True

        # in the first update_days_limit days (last airdate), update regularly
        airdate_diff = update_date - last_airdate
        last_update_diff = update_date - last_update_indexer

        update_step_list = [[60, 1], [120, 3], [180, 7], [1281, 15], [update_days_limit, 30]]
        for date_diff, interval in update_step_list:
            if airdate_diff <= datetime.timedelta(days=date_diff) and last_update_diff >= datetime.timedelta(days=interval):
                return True

        # update shows without an airdate for the last episode for update_days_limit days every 7 days
        if last_airdate_unknown and airdate_diff <= ended_limit and last_update_diff >= datetime.timedelta(days=7):
            return True
        else:
            return False

    def writeShowNFO(self):

        result = False

        if not ek.ek(os.path.isdir, self._location):
            logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.indexerid)
            return False

        logger.log('%s: Writing NFOs for show' % self.indexerid)
        for cur_provider in sickbeard.metadata_provider_dict.values():
            result = cur_provider.create_show_metadata(self) or result

        return result

    def writeMetadata(self, show_only=False):

        if not ek.ek(os.path.isdir, self._location):
            logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.indexerid)
            return

        self.getImages()

        self.writeShowNFO()

        if not show_only:
            self.writeEpisodeNFOs()

    def writeEpisodeNFOs(self):

        if not ek.ek(os.path.isdir, self._location):
            logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.indexerid)
            return

        logger.log('%s: Writing NFOs for all episodes' % self.indexerid)

        myDB = db.DBConnection()
        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])

        for epResult in sqlResults:
            logger.log('%s: Retrieving/creating episode %sx%s' % (self.indexerid, epResult["season"], epResult["episode"]),
                       logger.DEBUG)
            curEp = self.getEpisode(epResult["season"], epResult["episode"])
            curEp.createMetaFiles()

    def updateMetadata(self):

        if not ek.ek(os.path.isdir, self._location):
            logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.indexerid)
            return

        self.updateShowNFO()

    def updateShowNFO(self):

        result = False

        if not ek.ek(os.path.isdir, self._location):
            logger.log('%s: Show directory doesn\'t exist, skipping NFO generation' % self.indexerid)
            return False

        logger.log('%s: Updating NFOs for show with new indexer info' % self.indexerid)
        for cur_provider in sickbeard.metadata_provider_dict.values():
            result = cur_provider.update_show_indexer_metadata(self) or result

        return result

    # find all media files in the show folder and create episodes for as many as possible
    def loadEpisodesFromDir(self):

        if not ek.ek(os.path.isdir, self._location):
            logger.log('%s: Show directory doesn\'t exist, not loading episodes from disk' % self.indexerid)
            return

        logger.log('%s: Loading all episodes from the show directory %s' % (self.indexerid, self._location))

        # get file list
        mediaFiles = helpers.listMediaFiles(self._location)

        # create TVEpisodes from each media file (if possible)
        sql_l = []
        for mediaFile in mediaFiles:
            parse_result = None
            curEpisode = None

            logger.log('%s: Creating episode from %s' % (self.indexerid, mediaFile), logger.DEBUG)
            try:
                curEpisode = self.makeEpFromFile(ek.ek(os.path.join, self._location, mediaFile))
            except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException) as e:
                logger.log('Episode %s returned an exception: %s' % (mediaFile, ex(e)), logger.ERROR)
                continue
            except exceptions.EpisodeDeletedException:
                logger.log('The episode deleted itself when I tried making an object for it', logger.DEBUG)

            if curEpisode is None:
                continue

            # see if we should save the release name in the db
            ep_file_name = ek.ek(os.path.basename, curEpisode.location)
            ep_file_name = ek.ek(os.path.splitext, ep_file_name)[0]

            try:
                parse_result = None
                np = NameParser(False, showObj=self)
                parse_result = np.parse(ep_file_name)
            except (InvalidNameException, InvalidShowException):
                pass

            if ep_file_name and parse_result and None is not parse_result.release_group:
                logger.log(
                    'Name %s gave release group of %s, seems valid' % (ep_file_name, parse_result.release_group),
                    logger.DEBUG)
                curEpisode.release_name = ep_file_name

            # store the reference in the show
            if curEpisode != None:
                if self.subtitles:
                    try:
                        curEpisode.refreshSubtitles()
                    except:
                        logger.log('%s: Could not refresh subtitles' % self.indexerid, logger.ERROR)
                        logger.log(traceback.format_exc(), logger.DEBUG)

                result = curEpisode.get_sql()
                if None is not result:
                    sql_l.append(result)

        if 0 < len(sql_l):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

    def loadEpisodesFromDB(self, update=False):

        logger.log('Loading all episodes from the DB')

        myDB = db.DBConnection()
        sql = 'SELECT * FROM tv_episodes WHERE showid = ? AND indexer = ?'
        sqlResults = myDB.select(sql, [self.indexerid, self.indexer])

        scannedEps = {}

        lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()

        if self.lang:
            lINDEXER_API_PARMS['language'] = self.lang

        if self.dvdorder != 0:
            lINDEXER_API_PARMS['dvdorder'] = True

        t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

        cachedShow = t[self.indexerid]
        cachedSeasons = {}

        if None is cachedShow:
            logger.log('No cache showdata to parse from %s' % sickbeard.indexerApi(self.indexer).name)
            return scannedEps

        for curResult in sqlResults:

            deleteEp = False

            curSeason = int(curResult["season"])
            curEpisode = int(curResult["episode"])

            if curSeason not in cachedSeasons:
                try:
                    cachedSeasons[curSeason] = cachedShow[curSeason]
                except sickbeard.indexer_seasonnotfound as e:
                    logger.log('Error when trying to load the episode from %s: %s' %
                               (sickbeard.indexerApi(self.indexer).name, e.message), logger.WARNING)
                    deleteEp = True

            if not curSeason in scannedEps:
                scannedEps[curSeason] = {}

            logger.log('Loading episode %sx%s from the DB' % (curSeason, curEpisode), logger.DEBUG)

            try:
                curEp = self.getEpisode(curSeason, curEpisode)

                # if we found out that the ep is no longer on TVDB then delete it from our database too
                if deleteEp:
                    curEp.deleteEpisode()

                curEp.loadFromDB(curSeason, curEpisode)
                curEp.loadFromIndexer(tvapi=t, cachedSeason=cachedSeasons[curSeason], update=update)
                scannedEps[curSeason][curEpisode] = True
            except exceptions.EpisodeDeletedException:
                logger.log('Tried loading an episode from the DB that should have been deleted, skipping it',
                           logger.DEBUG)
                continue

        return scannedEps

    def loadEpisodesFromIndexer(self, cache=True, update=False):

        lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()

        if not cache:
            lINDEXER_API_PARMS['cache'] = False

        if self.lang:
            lINDEXER_API_PARMS['language'] = self.lang

        if self.dvdorder != 0:
            lINDEXER_API_PARMS['dvdorder'] = True

        try:
            t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
            showObj = t[self.indexerid]
        except sickbeard.indexer_error:
            logger.log('%s timed out, unable to update episodes from %s' %
                       (sickbeard.indexerApi(self.indexer).name, sickbeard.indexerApi(self.indexer).name), logger.ERROR)
            return None

        logger.log('%s: Loading all episodes from %s..' % (self.indexerid, sickbeard.indexerApi(self.indexer).name))

        scannedEps = {}

        sql_l = []
        for season in showObj:
            scannedEps[season] = {}
            for episode in showObj[season]:
                # need some examples of wtf episode 0 means to decide if we want it or not
                if episode == 0:
                    continue
                try:
                    ep = self.getEpisode(season, episode)
                except exceptions.EpisodeNotFoundException:
                    logger.log('%s: %s object for %sx%s is incomplete, skipping this episode' %
                               (self.indexerid, sickbeard.indexerApi(self.indexer).name, season, episode))
                    continue
                else:
                    try:
                        ep.loadFromIndexer(tvapi=t, update=update)
                    except exceptions.EpisodeDeletedException:
                        logger.log('The episode was deleted, skipping the rest of the load')
                        continue

                with ep.lock:
                    logger.log('%s: Loading info from %s for episode %sx%s' %
                               (self.indexerid, sickbeard.indexerApi(self.indexer).name, season, episode), logger.DEBUG)
                    ep.loadFromIndexer(season, episode, tvapi=t, update=update)

                    result = ep.get_sql()
                    if None is not result:
                        sql_l.append(result)

                scannedEps[season][episode] = True

        if 0 < len(sql_l):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

        # Done updating save last update date
        self.last_update_indexer = datetime.date.today().toordinal()
        self.saveToDB()

        return scannedEps

    def getImages(self, fanart=None, poster=None):
        fanart_result = poster_result = banner_result = False
        season_posters_result = season_banners_result = season_all_poster_result = season_all_banner_result = False

        for cur_provider in sickbeard.metadata_provider_dict.values():
            # FIXME: Needs to not show this message if the option is not enabled?
            logger.log('Running metadata routines for %s' % cur_provider.name, logger.DEBUG)

            fanart_result = cur_provider.create_fanart(self) or fanart_result
            poster_result = cur_provider.create_poster(self) or poster_result
            banner_result = cur_provider.create_banner(self) or banner_result

            season_posters_result = cur_provider.create_season_posters(self) or season_posters_result
            season_banners_result = cur_provider.create_season_banners(self) or season_banners_result
            season_all_poster_result = cur_provider.create_season_all_poster(self) or season_all_poster_result
            season_all_banner_result = cur_provider.create_season_all_banner(self) or season_all_banner_result

        return fanart_result or poster_result or banner_result or season_posters_result or season_banners_result or season_all_poster_result or season_all_banner_result

    # make a TVEpisode object from a media file
    def makeEpFromFile(self, file):

        if not ek.ek(os.path.isfile, file):
            logger.log('%s: Not a real file... %s' % (self.indexerid, file))
            return None

        logger.log('%s: Creating episode object from %s' % (self.indexerid, file), logger.DEBUG)

        try:
            my_parser = NameParser(showObj=self)
            parse_result = my_parser.parse(file)
        except InvalidNameException:
            logger.log('Unable to parse the filename %s into a valid episode' % file, logger.DEBUG)
            return None
        except InvalidShowException:
            logger.log('Unable to parse the filename %s into a valid show' % file, logger.DEBUG)
            return None

        if not len(parse_result.episode_numbers):
            logger.log('parse_result: %s' % parse_result)
            logger.log('No episode number found in %s, ignoring it' % file, logger.ERROR)
            return None

        # for now lets assume that any episode in the show dir belongs to that show
        season = parse_result.season_number if None is not parse_result.season_number else 1
        episodes = parse_result.episode_numbers
        root_ep = None

        sql_l = []
        for cur_ep_num in episodes:

            episode = int(cur_ep_num)

            logger.log('%s: %s parsed to %s %sx%s' % (self.indexerid, file, self.name, season, episode), logger.DEBUG)

            check_quality_again = False
            same_file = False
            cur_ep = self.getEpisode(season, episode)

            if None is cur_ep:
                try:
                    cur_ep = self.getEpisode(season, episode, file)
                except exceptions.EpisodeNotFoundException:
                    logger.log('%s: Unable to figure out what this file is, skipping' % self.indexerid, logger.ERROR)
                    continue

            else:
                # if there is a new file associated with this ep then re-check the quality
                status, quality = sickbeard.common.Quality.splitCompositeStatus(cur_ep.status)

                if IGNORED == status:
                    continue

                if (cur_ep.location and ek.ek(os.path.normpath, cur_ep.location) != ek.ek(os.path.normpath, file)) or \
                        (not cur_ep.location and file) or \
                        (SKIPPED == status):
                    logger.log('The old episode had a different file associated with it, re-checking the quality ' +
                               'based on the new filename %s' % file, logger.DEBUG)
                    check_quality_again = True

                with cur_ep.lock:
                    old_size = cur_ep.file_size if cur_ep.location and status != SKIPPED else 0
                    cur_ep.location = file
                    # if the sizes are the same then it's probably the same file
                    if old_size and cur_ep.file_size == old_size:
                        same_file = True
                    else:
                        same_file = False
                    cur_ep.checkForMetaFiles()

            if None is root_ep:
                root_ep = cur_ep
            else:
                if cur_ep not in root_ep.relatedEps:
                    root_ep.relatedEps.append(cur_ep)

            # if it's a new file then
            if not same_file:
                cur_ep.release_name = ''

            # if they replace a file on me I'll make some attempt at re-checking the quality unless I know it's the same file
            if check_quality_again and not same_file:
                new_quality = Quality.nameQuality(file, self.is_anime)
                if Quality.UNKNOWN == new_quality:
                    new_quality = Quality.fileQuality(file)
                logger.log('Since this file was renamed, file %s was checked and quality "%s" found'
                           % (file, Quality.qualityStrings[new_quality]), logger.DEBUG)
                status, quality = sickbeard.common.Quality.splitCompositeStatus(cur_ep.status)
                if Quality.UNKNOWN != new_quality or SKIPPED == status:
                    cur_ep.status = Quality.compositeStatus(DOWNLOADED, new_quality)

            # check for status/quality changes as long as it's a new file
            elif not same_file and sickbeard.helpers.has_media_ext(file) \
                    and cur_ep.status not in Quality.DOWNLOADED + [ARCHIVED, IGNORED]:

                old_status, old_quality = Quality.splitCompositeStatus(cur_ep.status)
                new_quality = Quality.nameQuality(file, self.is_anime)
                if Quality.UNKNOWN == new_quality:
                    new_quality = Quality.fileQuality(file)
                    if Quality.UNKNOWN == new_quality:
                        new_quality = Quality.assumeQuality(file)

                new_status = None

                # if it was snatched and now exists then set the status correctly
                if SNATCHED == old_status and old_quality <= new_quality:
                    logger.log('STATUS: this episode used to be snatched with quality %s but a file exists with quality %s so setting the status to DOWNLOADED'
                               % (Quality.qualityStrings[old_quality], Quality.qualityStrings[new_quality]), logger.DEBUG)
                    new_status = DOWNLOADED

                # if it was snatched proper and we found a higher quality one then allow the status change
                elif SNATCHED_PROPER == old_status and old_quality < new_quality:
                    logger.log('STATUS: this episode used to be snatched proper with quality %s but a file exists with quality %s so setting the status to DOWNLOADED'
                               % (Quality.qualityStrings[old_quality], Quality.qualityStrings[new_quality]), logger.DEBUG)
                    new_status = DOWNLOADED

                elif old_status not in (SNATCHED, SNATCHED_PROPER):
                    new_status = DOWNLOADED

                if None is not new_status:
                    with cur_ep.lock:
                        logger.log('STATUS: we have an associated file, so setting the status from %s to DOWNLOADED/%s'
                                   % (cur_ep.status, Quality.compositeStatus(new_status, new_quality)), logger.DEBUG)
                        cur_ep.status = Quality.compositeStatus(new_status, new_quality)

            with cur_ep.lock:
                result = cur_ep.get_sql()
                if None is not result:
                    sql_l.append(result)

        if 0 < len(sql_l):
            my_db = db.DBConnection()
            my_db.mass_action(sql_l)

        # creating metafiles on the root should be good enough
        if sickbeard.USE_FAILED_DOWNLOADS and root_ep is not None:
            with root_ep.lock:
                root_ep.createMetaFiles()

        return root_ep

    def loadFromDB(self, skipNFO=False):

        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT * FROM tv_shows WHERE indexer_id = ?', [self.indexerid])

        if len(sqlResults) > 1:
            logger.log('%s: Loading show info from database' % self.indexerid)
            raise exceptions.MultipleDBShowsException()
        elif len(sqlResults) == 0:
            logger.log('%s: Unable to find the show in the database' % self.indexerid)
            return
        else:
            if not self.indexer:
                self.indexer = int(sqlResults[0]['indexer'])
            if not self.name:
                self.name = sqlResults[0]['show_name']
            if not self.network:
                self.network = sqlResults[0]['network']
            if not self.genre:
                self.genre = sqlResults[0]['genre']
            if self.classification is None:
                self.classification = sqlResults[0]['classification']

            self.runtime = sqlResults[0]['runtime']

            self.status = sqlResults[0]['status']
            if not self.status:
                self.status = ''

            self.airs = sqlResults[0]['airs']
            if not self.airs:
                self.airs = ''

            self.startyear = sqlResults[0]['startyear']
            if not self.startyear:
                self.startyear = 0

            self.air_by_date = sqlResults[0]['air_by_date']
            if not self.air_by_date:
                self.air_by_date = 0

            self.anime = sqlResults[0]['anime']
            if None is self.anime:
                self.anime = 0

            self.sports = sqlResults[0]['sports']
            if not self.sports:
                self.sports = 0

            self.scene = sqlResults[0]['scene']
            if not self.scene:
                self.scene = 0

            self.subtitles = sqlResults[0]['subtitles']
            if self.subtitles:
                self.subtitles = 1
            else:
                self.subtitles = 0

            self.dvdorder = sqlResults[0]['dvdorder']
            if not self.dvdorder:
                self.dvdorder = 0

            self.archive_firstmatch = sqlResults[0]['archive_firstmatch']
            if not self.archive_firstmatch:
                self.archive_firstmatch = 0

            self.quality = int(sqlResults[0]['quality'])
            self.flatten_folders = int(sqlResults[0]['flatten_folders'])
            self.paused = int(sqlResults[0]['paused'])

            try:
                self.location = sqlResults[0]['location']
            except Exception:
                dirty_setter('_location')(self, sqlResults[0]['location'])
                self._isDirGood = False

            if not self.lang:
                self.lang = sqlResults[0]['lang']

            self.last_update_indexer = sqlResults[0]['last_update_indexer']

            self.rls_ignore_words = sqlResults[0]['rls_ignore_words']
            self.rls_require_words = sqlResults[0]['rls_require_words']

            if not self.imdbid:
                imdbid = sqlResults[0]['imdb_id'] or ''
                self.imdbid = ('', imdbid)[2 < len(imdbid)]

            if self.is_anime:
                self.release_groups = BlackAndWhiteList(self.indexerid)

            if not self.overview:
                self.overview = sqlResults[0]['overview']

            self.tag = sqlResults[0]['tag']
            if not self.tag:
                self.tag = 'Show List'

        logger.log('Loaded.. {: <9} {: <8} {}'.format(
            sickbeard.indexerApi(self.indexer).config.get('name') + ',', str(self.indexerid) + ',', self.name))

        # Get IMDb_info from database
        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT * FROM imdb_info WHERE indexer_id = ?', [self.indexerid])

        if 0 < len(sqlResults):
            self.imdb_info = dict(zip(sqlResults[0].keys(), sqlResults[0]))
        elif sickbeard.USE_IMDB_INFO:
            logger.log('%s: The next show update will attempt to find IMDb info for [%s]' %
                       (self.indexerid, self.name), logger.DEBUG)
            return

        self.dirty = False
        return True

    def loadFromIndexer(self, cache=True, tvapi=None, cachedSeason=None):

        logger.log('%s: Loading show info from %s' % (self.indexerid, sickbeard.indexerApi(self.indexer).name))

        # There's gotta be a better way of doing this but we don't wanna
        # change the cache value elsewhere
        if tvapi is None:
            lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()

            if not cache:
                lINDEXER_API_PARMS['cache'] = False

            if self.lang:
                lINDEXER_API_PARMS['language'] = self.lang

            if self.dvdorder != 0:
                lINDEXER_API_PARMS['dvdorder'] = True

            t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

        else:
            t = tvapi

        myEp = t[self.indexerid]
        if None is myEp:
            logger.log('Show not found (maybe even removed?)', logger.WARNING)
            return False

        try:
            self.name = myEp['seriesname'].strip()
        except AttributeError:
            raise sickbeard.indexer_attributenotfound(
                "Found %s, but attribute 'seriesname' was empty." % (self.indexerid))

        self.classification = getattr(myEp, 'classification', 'Scripted')
        self.genre = getattr(myEp, 'genre', '')
        self.network = getattr(myEp, 'network', '')
        self.runtime = getattr(myEp, 'runtime', '')

        self.imdbid = getattr(myEp, 'imdb_id', '')

        if getattr(myEp, 'airs_dayofweek', None) is not None and getattr(myEp, 'airs_time', None) is not None:
            self.airs = myEp["airs_dayofweek"] + " " + myEp["airs_time"]

        if getattr(myEp, 'firstaired', None) is not None:
            self.startyear = int(str(myEp["firstaired"]).split('-')[0])

        self.status = getattr(myEp, 'status', '')
        self.overview = getattr(myEp, 'overview', '')

    def load_imdb_info(self):

        if not sickbeard.USE_IMDB_INFO:
            return

        from lib.imdb import _exceptions as imdb_exceptions

        logger.log('Retrieving show info from IMDb', logger.DEBUG)
        try:
            self._get_imdb_info()
        except imdb_exceptions.IMDbDataAccessError as e:
            logger.log('Timeout waiting for IMDb api: %s' % ex(e), logger.WARNING)
        except imdb_exceptions.IMDbError as e:
            logger.log('Something is wrong with IMDb api: %s' % ex(e), logger.WARNING)
        except Exception as e:
            logger.log('Error loading IMDb info: %s' % ex(e), logger.ERROR)
            logger.log('%s' % traceback.format_exc(), logger.DEBUG)

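    # fetch show details from IMDb via lib.imdb, using the stored imdb_id (or the mapped IMDb indexer id),
    # normalise the fields of interest (runtimes, akas, genres, certificates, country codes) and store
    # the result in self.imdb_info for the DB upsert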
    def _get_imdb_info(self):

        if not self.imdbid and self.ids.get(indexermapper.INDEXER_IMDB, {'id': 0}).get('id', 0) <= 0:
            return

        imdb_info = {'imdb_id': self.imdbid or 'tt%07d' % self.ids[indexermapper.INDEXER_IMDB]['id'],
                     'title': '',
                     'year': '',
                     'akas': [],
                     'runtimes': '',
                     'genres': [],
                     'countries': '',
                     'country_codes': [],
                     'certificates': [],
                     'rating': '',
                     'votes': '',
                     'last_update': ''}

        i = imdb.IMDb()
        imdbTv = i.get_movie(str(re.sub('[^0-9]', '', self.imdbid or '%07d' % self.ids[indexermapper.INDEXER_IMDB]['id'])))

        for key in filter(lambda x: x.replace('_', ' ') in imdbTv.keys(), imdb_info.keys()):
            # Store only the first value for string type
            if type(imdb_info[key]) == type('') and type(imdbTv.get(key)) == type([]):
                imdb_info[key] = imdbTv.get(key.replace('_', ' '))[0]
            else:
                imdb_info[key] = imdbTv.get(key.replace('_', ' '))

        # Filter only the value
        if imdb_info['runtimes']:
            imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0)
        else:
            imdb_info['runtimes'] = self.runtime

        if imdb_info['akas']:
            imdb_info['akas'] = '|'.join(imdb_info['akas'])
        else:
            imdb_info['akas'] = ''

        # Join all genres in a string
        if imdb_info['genres']:
            imdb_info['genres'] = '|'.join(imdb_info['genres'])
        else:
            imdb_info['genres'] = ''

        # Get only the production country certificate if any
        if imdb_info['certificates'] and imdb_info['countries']:
            dct = {}
            try:
                for item in imdb_info['certificates']:
                    dct[item.split(':')[0]] = item.split(':')[1]

                imdb_info['certificates'] = dct[imdb_info['countries']]
            except:
                imdb_info['certificates'] = ''
        else:
            imdb_info['certificates'] = ''

        if imdb_info['country_codes']:
            imdb_info['country_codes'] = '|'.join(imdb_info['country_codes'])
        else:
            imdb_info['country_codes'] = ''

        imdb_info['last_update'] = datetime.date.today().toordinal()

        # Rename dict keys without spaces for DB upsert
        self.imdb_info = dict(
            (k.replace(' ', '_'), k(v) if hasattr(v, 'keys') else v) for k, v in imdb_info.items())
        logger.log('%s: Obtained info from IMDb -> %s' % (self.indexerid, self.imdb_info), logger.DEBUG)

        logger.log('%s: Parsed latest IMDb show info for [%s]' % (self.indexerid, self.name))

    def nextEpisode(self):

        logger.log('%s: Finding the episode which airs next' % self.indexerid, logger.DEBUG)

        curDate = datetime.date.today().toordinal()
        if not self.nextaired or self.nextaired and curDate > self.nextaired:
            myDB = db.DBConnection()
            sqlResults = myDB.select(
                'SELECT airdate, season, episode FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status in (?,?,?) ORDER BY airdate ASC LIMIT 1',
                [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED, FAILED])

            if sqlResults == None or len(sqlResults) == 0:
                logger.log('%s: No episode found... need to implement a show status' % self.indexerid, logger.DEBUG)
                self.nextaired = ''
            else:
                logger.log('%s: Found episode %sx%s' % (self.indexerid, sqlResults[0]['season'], sqlResults[0]['episode']),
                           logger.DEBUG)
                self.nextaired = sqlResults[0]['airdate']

        return self.nextaired

    def deleteShow(self, full=False):

        sql_l = [["DELETE FROM tv_episodes WHERE showid = ? AND indexer = ?", [self.indexerid, self.indexer]],
                 ["DELETE FROM tv_shows WHERE indexer_id = ? AND indexer = ?", [self.indexerid, self.indexer]],
                 ["DELETE FROM imdb_info WHERE indexer_id = ?", [self.indexerid]],
                 ["DELETE FROM xem_refresh WHERE indexer_id = ? AND indexer = ?", [self.indexerid, self.indexer]],
                 ["DELETE FROM scene_numbering WHERE indexer_id = ? AND indexer = ?", [self.indexerid, self.indexer]],
                 ["DELETE FROM whitelist WHERE show_id = ?", [self.indexerid]],
                 ["DELETE FROM blacklist WHERE show_id = ?", [self.indexerid]],
                 ["DELETE FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?", [self.indexerid, self.indexer]]]

        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

        name_cache.remove_from_namecache(self.indexerid)

        action = ('delete', 'trash')[sickbeard.TRASH_REMOVE_SHOW]

        # remove self from show list
        sickbeard.showList = [x for x in sickbeard.showList if int(x.indexerid) != self.indexerid]

        # clear the cache
        image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
        for path, dirs, files in ek.ek(os.walk, image_cache_dir):
            for filename in ek.ek(fnmatch.filter, files, '%s.*' % self.indexerid):
                cache_file = ek.ek(os.path.join, path, filename)
                logger.log('Attempt to %s cache file %s' % (action, cache_file))
                try:
                    if sickbeard.TRASH_REMOVE_SHOW:
                        send2trash(cache_file)
                    else:
                        os.remove(cache_file)

                except OSError as e:
                    logger.log('Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING)

        # remove entire show folder
        if full:
            try:
                logger.log('Attempt to %s show folder %s' % (action, self._location))
                # check first the read-only attribute
                file_attribute = ek.ek(os.stat, self.location)[0]
                if (not file_attribute & stat.S_IWRITE):
                    # File is read-only, so make it writeable
                    logger.log('Attempting to make writeable the read only folder %s' % self._location, logger.DEBUG)
                    try:
                        ek.ek(os.chmod, self.location, stat.S_IWRITE)
                    except:
                        logger.log('Unable to change permissions of %s' % self._location, logger.WARNING)

                if sickbeard.TRASH_REMOVE_SHOW:
                    send2trash(self.location)
                else:
                    ek.ek(shutil.rmtree, self.location)

                logger.log('%s show folder %s' %
                           (('Deleted', 'Trashed')[sickbeard.TRASH_REMOVE_SHOW],
                            self._location))

            except exceptions.ShowDirNotFoundException:
                logger.log('Show folder does not exist, no need to %s %s' % (action, self._location), logger.WARNING)
            except OSError as e:
                logger.log('Unable to %s %s: %s / %s' % (action, self._location, repr(e), str(e)), logger.WARNING)

def populateCache ( self ) :
cache_inst = image_cache . ImageCache ( )
2014-03-25 05:57:24 +00:00
2016-02-23 20:55:05 +00:00
logger . log ( ' Checking & filling cache for show %s ' % self . name )
2014-03-10 05:18:05 +00:00
cache_inst . fill_cache ( self )
def refreshDir ( self ) :
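# re-scan the show folder: import any files found on disk, then walk every episode in the
# database that has a location and reconcile it with what actually exists on disk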
# make sure the show dir is where we think it is unless dirs are created on the fly
if not ek . ek ( os . path . isdir , self . _location ) and not sickbeard . CREATE_MISSING_SHOW_DIRS :
return False
# load from dir
self . loadEpisodesFromDir ( )
# run through all locations from DB, check that they exist
2016-02-01 19:37:32 +00:00
logger . log ( ' %s : Loading all episodes with a location from the database ' % self . indexerid )
2014-03-10 05:18:05 +00:00
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
sqlResults = myDB . select ( " SELECT * FROM tv_episodes WHERE showid = ? AND location != ' ' " , [ self . indexerid ] )
2014-03-10 05:18:05 +00:00
2014-05-30 10:01:49 +00:00
sql_l = [ ]
2014-03-10 05:18:05 +00:00
for ep in sqlResults :
2016-02-23 20:55:05 +00:00
curLoc = os . path . normpath ( ep [ ' location ' ] )
season = int ( ep [ ' season ' ] )
episode = int ( ep [ ' episode ' ] )
2014-03-10 05:18:05 +00:00
try :
curEp = self . getEpisode ( season , episode )
except exceptions . EpisodeDeletedException :
2016-02-23 20:55:05 +00:00
logger . log ( ' The episode was deleted while we were refreshing it, moving on to the next one ' ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
continue
# if the path doesn't exist or if it's not in our show dir
2014-03-25 05:57:24 +00:00
if not ek . ek ( os . path . isfile , curLoc ) or not os . path . normpath ( curLoc ) . startswith (
os . path . normpath ( self . location ) ) :
2014-03-10 05:18:05 +00:00
2014-05-17 22:14:31 +00:00
# check if downloaded files still exist, update our data if this has changed
2015-03-17 13:59:03 +00:00
if 1 != sickbeard . SKIP_REMOVED_FILES :
2014-05-17 22:14:31 +00:00
with curEp . lock :
# if it used to have a file associated with it and it doesn't anymore then set it to IGNORED
if curEp . location and curEp . status in Quality . DOWNLOADED :
2015-03-17 13:59:03 +00:00
curEp . status = ( sickbeard . SKIP_REMOVED_FILES , IGNORED ) [ not sickbeard . SKIP_REMOVED_FILES ]
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : File no longer at location for s %02d e %02d , episode removed and status changed to %s '
2015-03-17 13:59:03 +00:00
% ( str ( self . indexerid ) , season , episode , statusStrings [ curEp . status ] ) ,
2014-05-27 07:44:23 +00:00
logger . DEBUG )
2014-05-17 22:14:31 +00:00
curEp . subtitles = list ( )
curEp . subtitles_searchcount = 0
curEp . subtitles_lastsearch = str ( datetime . datetime . min )
curEp . location = ' '
curEp . hasnfo = False
curEp . hastbn = False
curEp . release_name = ' '
2014-05-30 10:01:49 +00:00
2015-03-29 11:20:29 +00:00
result = curEp . get_sql ( )
if None is not result :
sql_l . append ( result )
2014-05-14 12:33:36 +00:00
else :
# the file exists; if enabled, sync its modified timestamp to the air date
if sickbeard . AIRDATE_EPISODES :
2014-07-21 13:29:07 +00:00
curEp . airdateModifyStamp ( )
2014-05-14 12:33:36 +00:00
2015-03-29 11:20:29 +00:00
if 0 < len ( sql_l ) :
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
myDB . mass_action ( sql_l )
2014-06-30 15:57:32 +00:00
2014-03-10 05:18:05 +00:00
def downloadSubtitles ( self , force = False ) :
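# walk every episode of this show that has a media file (latest episodes first) and ask
# each episode object to fetch any missing subtitles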
2014-05-27 07:44:23 +00:00
# TODO: Add support for force option
2014-03-10 05:18:05 +00:00
if not ek . ek ( os . path . isdir , self . _location ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Show directory doesn \' t exist, can \' t download subtitles ' % self . indexerid , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Downloading subtitles ' % self . indexerid , logger . DEBUG )
2014-03-25 05:57:24 +00:00
2014-03-10 05:18:05 +00:00
try :
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
episodes = myDB . select (
" SELECT location FROM tv_episodes WHERE showid = ? AND location NOT LIKE ' ' ORDER BY season DESC, episode DESC " ,
[ self . indexerid ] )
2014-06-07 21:32:38 +00:00
2014-03-10 05:18:05 +00:00
for episodeLoc in episodes :
episode = self . makeEpFromFile ( episodeLoc [ ' location ' ] )
subtitles = episode . downloadSubtitles ( force = force )
except Exception as e :
2016-02-23 20:55:05 +00:00
logger . log ( ' Error occurred when downloading subtitles: %s ' % traceback . format_exc ( ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return
2016-09-04 20:00:44 +00:00
def switchIndexer ( self , old_indexer , old_indexerid , pausestatus_after = None ) :
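# repoint every database table from the old indexer/indexer id to the current one, rename
# cached images, rebuild the name cache and finally queue a forced, very-high-priority update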
myDB = db . DBConnection ( )
myDB . mass_action ( [ [ ' UPDATE tv_shows SET indexer = ?, indexer_id = ? WHERE indexer = ? AND indexer_id = ? ' ,
[ self . indexer , self . indexerid , old_indexer , old_indexerid ] ] ,
[ ' UPDATE tv_episodes SET showid = ?, indexer = ?, indexerid = 0 WHERE indexer = ? AND showid = ? ' ,
[ self . indexerid , self . indexer , old_indexer , old_indexerid ] ] ,
[ ' UPDATE blacklist SET show_id = ? WHERE show_id = ? ' , [ self . indexerid , old_indexerid ] ] ,
[ ' UPDATE history SET showid = ? WHERE showid = ? ' , [ self . indexerid , old_indexerid ] ] ,
[ ' UPDATE imdb_info SET indexer_id = ? WHERE indexer_id = ? ' , [ self . indexerid , old_indexerid ] ] ,
[ ' UPDATE scene_exceptions SET indexer_id = ? WHERE indexer_id = ? ' , [ self . indexerid , old_indexerid ] ] ,
[ ' UPDATE scene_numbering SET indexer = ?, indexer_id = ? WHERE indexer = ? AND indexer_id = ? ' ,
[ self . indexer , self . indexerid , old_indexer , old_indexerid ] ] ,
[ ' UPDATE whitelist SET show_id = ? WHERE show_id = ? ' , [ self . indexerid , old_indexerid ] ] ,
[ ' UPDATE xem_refresh SET indexer = ?, indexer_id = ? WHERE indexer = ? AND indexer_id = ? ' ,
[ self . indexer , self . indexerid , old_indexer , old_indexerid ] ] ] )
myFailedDB = db . DBConnection ( ' failed.db ' )
myFailedDB . action ( ' UPDATE history SET showid = ? WHERE showid = ? ' , [ self . indexerid , old_indexerid ] )
del_mapping ( old_indexer , old_indexerid )
self . ids [ old_indexer ] [ ' status ' ] = MapStatus . NONE
self . ids [ self . indexer ] [ ' status ' ] = MapStatus . SOURCE
save_mapping ( self )
name_cache . remove_from_namecache ( old_indexerid )
image_cache_dir = ek . ek ( os . path . join , sickbeard . CACHE_DIR , ' images ' )
for path , dirs , files in ek . ek ( os . walk , image_cache_dir ) :
for filename in ek . ek ( fnmatch . filter , files , ' %s .* ' % old_indexerid ) :
cache_file = ek . ek ( os . path . join , path , filename )
new_cachefile = ek . ek ( os . path . join , path , filename . replace ( str ( old_indexerid ) , str ( self . indexerid ) ) )
try :
helpers . moveFile ( cache_file , new_cachefile )
except Exception as e :
logger . log ( ' Unable to rename %s to %s : %s / %s ' % ( cache_file , new_cachefile , repr ( e ) , str ( e ) ) , logger . WARNING )
name_cache . buildNameCache ( self )
# force the update
try :
sickbeard . showQueueScheduler . action . updateShow (
self , force = True , web = True , priority = QueuePriorities . VERYHIGH , pausestatus_after = pausestatus_after )
except exceptions . CantUpdateException as e :
logger . log ( ' Unable to update this show. %s ' % ex ( e ) , logger . ERROR )
2014-03-10 05:18:05 +00:00
2014-06-24 03:25:20 +00:00
def saveToDB ( self , forceSave = False ) :
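# persist the show record (and its imdb info, when enabled) but skip the write entirely
# if nothing has changed since the last save and forceSave is not set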
if not self . dirty and not forceSave :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Not saving show to db - record is not dirty ' % self . indexerid , logger . DEBUG )
2014-06-24 03:25:20 +00:00
return
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Saving show info to database ' % self . indexerid , logger . DEBUG )
controlValueDict = { ' indexer_id ' : self . indexerid }
newValueDict = { ' indexer ' : self . indexer ,
' show_name ' : self . name ,
' location ' : self . _location ,
' network ' : self . network ,
' genre ' : self . genre ,
' classification ' : self . classification ,
' runtime ' : self . runtime ,
' quality ' : self . quality ,
' airs ' : self . airs ,
' status ' : self . status ,
' flatten_folders ' : self . flatten_folders ,
' paused ' : self . paused ,
' air_by_date ' : self . air_by_date ,
' anime ' : self . anime ,
' scene ' : self . scene ,
' sports ' : self . sports ,
' subtitles ' : self . subtitles ,
' dvdorder ' : self . dvdorder ,
' archive_firstmatch ' : self . archive_firstmatch ,
' startyear ' : self . startyear ,
' lang ' : self . lang ,
' imdb_id ' : self . imdbid ,
' last_update_indexer ' : self . last_update_indexer ,
' rls_ignore_words ' : self . rls_ignore_words ,
2015-03-22 11:52:56 +00:00
' rls_require_words ' : self . rls_require_words ,
2015-04-07 03:10:50 +00:00
' overview ' : self . overview ,
' tag ' : self . tag ,
2014-03-25 05:57:24 +00:00
}
2014-06-07 21:32:38 +00:00
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
2016-02-23 20:55:05 +00:00
myDB . upsert ( ' tv_shows ' , newValueDict , controlValueDict )
2016-02-01 19:37:32 +00:00
self . dirty = False
2014-06-07 21:32:38 +00:00
2015-03-27 19:25:34 +00:00
if sickbeard . USE_IMDB_INFO and len ( self . imdb_info ) :
2015-03-14 02:48:38 +00:00
controlValueDict = { ' indexer_id ' : self . indexerid }
2014-03-10 05:18:05 +00:00
newValueDict = self . imdb_info
2014-03-25 05:57:24 +00:00
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
2015-03-14 02:48:38 +00:00
myDB . upsert ( ' imdb_info ' , newValueDict , controlValueDict )
2014-03-10 05:18:05 +00:00
def __str__ ( self ) :
2016-02-23 20:55:05 +00:00
toReturn = ' '
toReturn + = ' indexerid: %s \n ' % self . indexerid
toReturn + = ' indexer: %s \n ' % self . indexer
toReturn + = ' name: %s \n ' % self . name
toReturn + = ' location: %s \n ' % self . _location
2014-03-10 05:18:05 +00:00
if self . network :
2016-02-23 20:55:05 +00:00
toReturn + = ' network: %s \n ' % self . network
2014-03-10 05:18:05 +00:00
if self . airs :
2016-02-23 20:55:05 +00:00
toReturn + = ' airs: %s \n ' % self . airs
2014-03-10 05:18:05 +00:00
if self . status :
2016-02-23 20:55:05 +00:00
toReturn + = ' status: %s \n ' % self . status
toReturn + = ' startyear: %s \n ' % self . startyear
2014-05-18 15:57:07 +00:00
if self . genre :
2016-02-23 20:55:05 +00:00
toReturn + = ' genre: %s \n ' % self . genre
toReturn + = ' classification: %s \n ' % self . classification
toReturn + = ' runtime: %s \n ' % self . runtime
toReturn + = ' quality: %s \n ' % self . quality
toReturn + = ' scene: %s \n ' % self . is_scene
toReturn + = ' sports: %s \n ' % self . is_sports
toReturn + = ' anime: %s \n ' % self . is_anime
2014-03-10 05:18:05 +00:00
return toReturn
def wantEpisode ( self , season , episode , quality , manualSearch = False ) :
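# decide whether a result found at the given quality should be grabbed: the quality must be
# in the show's initial or archive list, and the episode's current status/quality must allow
# either a first download or an upgrade (e.g. a DOWNLOADED episode is only re-wanted when the
# found quality is in the wanted list and strictly better than what is already on disk)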
2016-02-23 20:55:05 +00:00
logger . log ( ' Checking if found episode %s x %s is wanted at quality %s ' %
( season , episode , Quality . qualityStrings [ quality ] ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# if the quality isn't one we want under any circumstances then just say no
2015-04-05 18:12:15 +00:00
initialQualities , archiveQualities = Quality . splitQuality ( self . quality )
allQualities = list ( set ( initialQualities + archiveQualities ) )
2015-06-19 23:34:56 +00:00
2016-02-23 20:55:05 +00:00
initial = ' = ( %s ) ' % ' , ' . join ( [ Quality . qualityStrings [ qual ] for qual in initialQualities ] )
2015-06-19 23:34:56 +00:00
if 0 < len ( archiveQualities ) :
2016-02-23 20:55:05 +00:00
initial = ' + upgrade to %s + ( %s ) ' \
2015-06-19 23:34:56 +00:00
% ( initial , ' , ' . join ( [ Quality . qualityStrings [ qual ] for qual in archiveQualities ] ) )
2016-02-23 20:55:05 +00:00
logger . log ( ' Want initial %s and found %s ' % ( initial , Quality . qualityStrings [ quality ] ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2015-04-05 18:12:15 +00:00
if quality not in allQualities :
2016-02-23 20:55:05 +00:00
logger . log ( ' Don \' t want this quality, ignoring found episode ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return False
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
2016-02-23 20:55:05 +00:00
sqlResults = myDB . select ( ' SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? ' ,
2014-06-21 22:46:59 +00:00
[ self . indexerid , season , episode ] )
2014-03-10 05:18:05 +00:00
if not sqlResults :
2016-02-23 20:55:05 +00:00
logger . log ( ' Unable to find a matching episode in database, ignoring found episode ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return False
epStatus = int ( sqlResults [ 0 ] [ " status " ] )
2014-03-20 08:15:22 +00:00
epStatus_text = statusStrings [ epStatus ]
2014-03-10 05:18:05 +00:00
2016-02-23 20:55:05 +00:00
logger . log ( ' Existing episode status: %s ( %s ) ' % ( epStatus , epStatus_text ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# if we know we don't want it then just say no
if epStatus in ( SKIPPED , IGNORED , ARCHIVED ) and not manualSearch :
2016-02-23 20:55:05 +00:00
logger . log ( ' Existing episode status is skipped/ignored/archived, ignoring found episode ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return False
# if it's one of these then we want it as long as it's in our allowed qualities
2015-04-05 18:12:15 +00:00
if quality in allQualities :
if epStatus in ( WANTED , UNAIRED , SKIPPED , FAILED ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' Existing episode status is wanted/unaired/skipped/failed, getting found episode ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return True
elif manualSearch :
2014-03-25 05:57:24 +00:00
logger . log (
2016-02-23 20:55:05 +00:00
' Usually ignoring found episode, but forced search allows the quality, getting found episode ' ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
return True
else :
2016-02-23 20:55:05 +00:00
logger . log ( ' Quality is on wanted list, need to check if it \' s better than existing quality ' ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
curStatus , curQuality = Quality . splitCompositeStatus ( epStatus )
2015-04-05 18:12:15 +00:00
downloadedStatusList = ( DOWNLOADED , SNATCHED , SNATCHED_PROPER , SNATCHED_BEST )
# special case: already downloaded quality is not in any of the wanted Qualities
if curStatus in downloadedStatusList and curQuality not in allQualities :
wantedQualities = allQualities
else :
wantedQualities = archiveQualities
2014-03-10 05:18:05 +00:00
2015-04-05 18:12:15 +00:00
# if we are re-downloading then we only want it if it's in our wantedQualities list and better than what we have
if curStatus in downloadedStatusList and quality in wantedQualities and quality > curQuality :
2016-02-23 20:55:05 +00:00
logger . log ( ' Episode already exists but the found episode has better quality, getting found episode ' ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
return True
2014-03-20 08:15:22 +00:00
else :
2016-02-23 20:55:05 +00:00
logger . log ( ' Episode already exists and the found episode has same/lower quality, ignoring found episode ' ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
2016-02-23 20:55:05 +00:00
logger . log ( ' None of the conditions were met, ignoring found episode ' , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return False
def getOverview ( self , epStatus ) :
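# map a composite episode status onto an Overview category used by the UI
# (good, wanted, skipped, unaired, snatched or qualifies-for-upgrade)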
2015-11-19 22:05:19 +00:00
status , quality = Quality . splitCompositeStatus ( epStatus )
if ARCHIVED == status :
2015-08-22 17:10:37 +00:00
return Overview . GOOD
2015-11-19 22:05:19 +00:00
if WANTED == status :
2014-03-10 05:18:05 +00:00
return Overview . WANTED
2015-11-19 22:05:19 +00:00
if status in ( SKIPPED , IGNORED ) :
2014-03-10 05:18:05 +00:00
return Overview . SKIPPED
2015-11-19 22:05:19 +00:00
if status in ( UNAIRED , UNKNOWN ) :
2015-08-22 17:10:37 +00:00
return Overview . UNAIRED
2015-11-19 22:05:19 +00:00
if status in Quality . DOWNLOADED + Quality . SNATCHED + Quality . SNATCHED_PROPER + Quality . FAILED + Quality . SNATCHED_BEST :
2014-03-10 05:18:05 +00:00
2015-08-22 17:10:37 +00:00
if FAILED == status :
2014-03-10 05:18:05 +00:00
return Overview . WANTED
2015-08-22 17:10:37 +00:00
if status in ( SNATCHED , SNATCHED_PROPER , SNATCHED_BEST ) :
2014-03-10 05:18:05 +00:00
return Overview . SNATCHED
2015-08-22 17:10:37 +00:00
void , best_qualities = Quality . splitQuality ( self . quality )
# if re-downloads aren't wanted then mark it "good" if there is anything
if not len ( best_qualities ) :
2014-03-10 05:18:05 +00:00
return Overview . GOOD
2015-08-22 17:10:37 +00:00
min_best , max_best = min ( best_qualities ) , max ( best_qualities )
if quality > = max_best \
or ( self . archive_firstmatch and
( quality in best_qualities or ( None is not min_best and quality > min_best ) ) ) :
2014-03-10 05:18:05 +00:00
return Overview . GOOD
2015-08-22 17:10:37 +00:00
return Overview . QUAL
2014-03-10 05:18:05 +00:00
2014-07-15 02:00:53 +00:00
def __getstate__ ( self ) :
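# exclude the non-picklable threading lock when serialising; __setstate__ recreates it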
d = dict ( self . __dict__ )
del d [ ' lock ' ]
return d
def __setstate__ ( self , d ) :
d [ ' lock ' ] = threading . Lock ( )
self . __dict__ . update ( d )
2014-08-18 12:40:29 +00:00
2014-03-10 05:18:05 +00:00
class TVEpisode ( object ) :
2016-02-23 20:55:05 +00:00
def __init__ ( self , show , season , episode , file = ' ' ) :
self . _name = ' '
2014-05-03 12:07:44 +00:00
self . _season = season
self . _episode = episode
2014-05-26 06:29:22 +00:00
self . _absolute_number = 0
2016-02-23 20:55:05 +00:00
self . _description = ' '
2014-03-10 05:18:05 +00:00
self . _subtitles = list ( )
self . _subtitles_searchcount = 0
self . _subtitles_lastsearch = str ( datetime . datetime . min )
self . _airdate = datetime . date . fromordinal ( 1 )
self . _hasnfo = False
self . _hastbn = False
self . _status = UNKNOWN
self . _indexerid = 0
self . _file_size = 0
self . _release_name = ' '
self . _is_proper = False
2014-07-22 04:53:32 +00:00
self . _version = 0
self . _release_group = ' '
2014-03-10 05:18:05 +00:00
# setting any of the above sets the dirty flag
self . dirty = True
self . show = show
2014-05-30 10:01:49 +00:00
self . scene_season = 0
self . scene_episode = 0
self . scene_absolute_number = 0
2014-03-10 05:18:05 +00:00
self . _location = file
2014-05-05 05:50:28 +00:00
self . _indexer = int ( self . show . indexer )
2014-03-10 05:18:05 +00:00
self . lock = threading . Lock ( )
2014-05-05 03:04:46 +00:00
self . specifyEpisode ( self . season , self . episode )
2014-03-10 05:18:05 +00:00
self . relatedEps = [ ]
self . checkForMetaFiles ( )
2014-09-20 12:03:48 +00:00
self . wantedQuality = [ ]
2016-02-23 20:55:05 +00:00
name = property ( lambda self : self . _name , dirty_setter ( ' _name ' ) )
season = property ( lambda self : self . _season , dirty_setter ( ' _season ' ) )
episode = property ( lambda self : self . _episode , dirty_setter ( ' _episode ' ) )
absolute_number = property ( lambda self : self . _absolute_number , dirty_setter ( ' _absolute_number ' ) )
description = property ( lambda self : self . _description , dirty_setter ( ' _description ' ) )
subtitles = property ( lambda self : self . _subtitles , dirty_setter ( ' _subtitles ' ) )
subtitles_searchcount = property ( lambda self : self . _subtitles_searchcount , dirty_setter ( ' _subtitles_searchcount ' ) )
subtitles_lastsearch = property ( lambda self : self . _subtitles_lastsearch , dirty_setter ( ' _subtitles_lastsearch ' ) )
airdate = property ( lambda self : self . _airdate , dirty_setter ( ' _airdate ' ) )
hasnfo = property ( lambda self : self . _hasnfo , dirty_setter ( ' _hasnfo ' ) )
hastbn = property ( lambda self : self . _hastbn , dirty_setter ( ' _hastbn ' ) )
status = property ( lambda self : self . _status , dirty_setter ( ' _status ' ) )
indexer = property ( lambda self : self . _indexer , dirty_setter ( ' _indexer ' ) )
indexerid = property ( lambda self : self . _indexerid , dirty_setter ( ' _indexerid ' ) )
# location = property(lambda self: self._location, dirty_setter('_location'))
file_size = property ( lambda self : self . _file_size , dirty_setter ( ' _file_size ' ) )
release_name = property ( lambda self : self . _release_name , dirty_setter ( ' _release_name ' ) )
is_proper = property ( lambda self : self . _is_proper , dirty_setter ( ' _is_proper ' ) )
version = property ( lambda self : self . _version , dirty_setter ( ' _version ' ) )
release_group = property ( lambda self : self . _release_group , dirty_setter ( ' _release_group ' ) )
2014-03-10 05:18:05 +00:00
def _set_location ( self , new_location ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' Setter sets location to %s ' % new_location , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2014-05-27 07:44:23 +00:00
# self._location = newLocation
2016-02-23 20:55:05 +00:00
dirty_setter ( ' _location ' ) ( self , new_location )
2014-03-10 05:18:05 +00:00
if new_location and ek . ek ( os . path . isfile , new_location ) :
self . file_size = ek . ek ( os . path . getsize , new_location )
else :
self . file_size = 0
location = property ( lambda self : self . _location , _set_location )
def refreshSubtitles ( self ) :
""" Look for subtitles files and refresh the subtitles property """
self . subtitles = subtitles . subtitlesLanguages ( self . location )
2014-03-25 05:57:24 +00:00
def downloadSubtitles ( self , force = False ) :
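# use subliminal to fetch any subtitle languages still missing for this episode's file,
# optionally move them into SUBTITLES_DIR, then update the search counters and notify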
2014-05-27 07:44:23 +00:00
# TODO: Add support for force option
2014-03-10 05:18:05 +00:00
if not ek . ek ( os . path . isfile , self . location ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Episode file doesn \' t exist, can \' t download subtitles for episode %s x %s ' %
( self . show . indexerid , self . season , self . episode ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Downloading subtitles for episode %s x %s ' % ( self . show . indexerid , self . season , self . episode ) ,
logger . DEBUG )
2014-03-10 05:18:05 +00:00
previous_subtitles = self . subtitles
try :
2014-05-15 01:41:08 +00:00
need_languages = set ( sickbeard . SUBTITLES_LANGUAGES ) - set ( self . subtitles )
subtitles = subliminal . download_subtitles ( [ self . location ] , languages = need_languages ,
services = sickbeard . subtitles . getEnabledServiceList ( ) , force = force ,
multi = True , cache_dir = sickbeard . CACHE_DIR )
2014-03-10 05:18:05 +00:00
if sickbeard . SUBTITLES_DIR :
2014-05-15 01:41:08 +00:00
for video in subtitles :
2014-03-10 05:18:05 +00:00
subs_new_path = ek . ek ( os . path . join , os . path . dirname ( video . path ) , sickbeard . SUBTITLES_DIR )
dir_exists = helpers . makeDir ( subs_new_path )
if not dir_exists :
2016-02-23 20:55:05 +00:00
logger . log ( ' Unable to create subtitles folder %s ' % subs_new_path , logger . ERROR )
2014-03-10 05:18:05 +00:00
else :
helpers . chmodAsParent ( subs_new_path )
2014-05-15 01:41:08 +00:00
for subtitle in subtitles . get ( video ) :
2014-03-10 05:18:05 +00:00
new_file_path = ek . ek ( os . path . join , subs_new_path , os . path . basename ( subtitle . path ) )
helpers . moveFile ( subtitle . path , new_file_path )
helpers . chmodAsParent ( new_file_path )
else :
2014-05-15 01:41:08 +00:00
for video in subtitles :
for subtitle in subtitles . get ( video ) :
2014-03-10 05:18:05 +00:00
helpers . chmodAsParent ( subtitle . path )
except Exception as e :
2016-02-23 20:55:05 +00:00
logger . log ( ' Error occurred when downloading subtitles: %s ' % traceback . format_exc ( ) , logger . ERROR )
2014-03-10 05:18:05 +00:00
return
self . refreshSubtitles ( )
2014-05-27 07:44:23 +00:00
self . subtitles_searchcount = self . subtitles_searchcount + 1 if self . subtitles_searchcount else 1 # guard for an unset count, which previously raised an error
2016-02-23 20:55:05 +00:00
self . subtitles_lastsearch = datetime . datetime . now ( ) . strftime ( '%Y-%m-%d %H:%M:%S' )
2014-03-10 05:18:05 +00:00
self . saveToDB ( )
newsubtitles = set ( self . subtitles ) . difference ( set ( previous_subtitles ) )
if newsubtitles :
2014-05-15 01:41:08 +00:00
subtitleList = " , " . join ( subliminal . language . Language ( x ) . name for x in newsubtitles )
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Downloaded %s subtitles for episode %s x %s ' %
( self . show . indexerid , subtitleList , self . season , self . episode ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
notifiers . notify_subtitle_download ( self . prettyName ( ) , subtitleList )
else :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : No subtitles downloaded for episode %s x %s ' % ( self . show . indexerid , self . season , self . episode ) ,
logger . DEBUG )
2014-03-10 05:18:05 +00:00
if sickbeard . SUBTITLES_HISTORY :
2014-05-15 01:41:08 +00:00
for video in subtitles :
for subtitle in subtitles . get ( video ) :
2014-03-10 05:18:05 +00:00
history . logSubtitle ( self . show . indexerid , self . season , self . episode , self . status , subtitle )
return subtitles
def checkForMetaFiles ( self ) :
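# ask every metadata provider whether an NFO and a thumbnail already exist for this
# episode's file and report whether either flag changed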
oldhasnfo = self . hasnfo
oldhastbn = self . hastbn
cur_nfo = False
cur_tbn = False
# check for nfo and tbn
2014-05-05 05:50:28 +00:00
if ek . ek ( os . path . isfile , self . location ) :
2014-03-10 05:18:05 +00:00
for cur_provider in sickbeard . metadata_provider_dict . values ( ) :
if cur_provider . episode_metadata :
new_result = cur_provider . _has_episode_metadata ( self )
else :
new_result = False
cur_nfo = new_result or cur_nfo
if cur_provider . episode_thumbnails :
new_result = cur_provider . _has_episode_thumb ( self )
else :
new_result = False
cur_tbn = new_result or cur_tbn
self . hasnfo = cur_nfo
self . hastbn = cur_tbn
# if either setting has changed return true, if not return false
return oldhasnfo != self . hasnfo or oldhastbn != self . hastbn
2014-05-05 03:04:46 +00:00
def specifyEpisode ( self , season , episode ) :
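# populate this episode from the database first, falling back to the NFO next to the file
# and finally to the indexer; raise EpisodeNotFoundException if every source fails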
2014-03-10 05:18:05 +00:00
2014-05-04 00:22:33 +00:00
sqlResult = self . loadFromDB ( season , episode )
2014-03-10 05:18:05 +00:00
2014-05-05 05:50:28 +00:00
if not sqlResult :
2014-03-10 05:18:05 +00:00
# only load from NFO if we didn't load from DB
if ek . ek ( os . path . isfile , self . location ) :
try :
self . loadFromNFO ( self . location )
except exceptions . NoNFOException :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : There was an error loading the NFO for episode %s x %s ' %
( self . show . indexerid , season , episode ) , logger . ERROR )
2014-03-10 05:18:05 +00:00
pass
2014-03-20 13:45:43 +00:00
# if we tried loading it from NFO and didn't find the NFO, try the Indexers
2014-05-02 11:33:06 +00:00
if not self . hasnfo :
2014-03-10 05:18:05 +00:00
try :
2014-05-05 03:04:46 +00:00
result = self . loadFromIndexer ( season , episode )
2014-03-10 05:18:05 +00:00
except exceptions . EpisodeDeletedException :
result = False
2014-03-20 13:45:43 +00:00
# if we failed SQL *and* NFO, Indexers then fail
2014-05-02 11:33:06 +00:00
if not result :
2014-03-25 05:57:24 +00:00
raise exceptions . EpisodeNotFoundException (
2016-02-23 20:55:05 +00:00
' Couldn \' t find episode %s x %s ' % ( season , episode ) )
2014-03-10 05:18:05 +00:00
2014-05-04 00:22:33 +00:00
def loadFromDB ( self , season , episode ) :
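# fill this object from its tv_episodes row (including scene numbering); returns False
# when no matching row exists so the caller can try other sources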
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Loading episode details from DB for episode %s x %s ' % ( self . show . indexerid , season , episode ) ,
logger . DEBUG )
2014-03-10 05:18:05 +00:00
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
2015-05-14 06:12:06 +00:00
sql_results = myDB . select ( ' SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? ' ,
2014-06-21 22:46:59 +00:00
[ self . show . indexerid , season , episode ] )
2014-03-10 05:18:05 +00:00
2015-05-14 06:12:06 +00:00
if len ( sql_results ) > 1 :
raise exceptions . MultipleDBEpisodesException ( ' Your DB has two records for the same episode somehow. ' )
elif len ( sql_results ) == 0 :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Episode %s x %s not found in the database ' % ( self . show . indexerid , self . season , self . episode ) ,
logger . DEBUG )
2014-03-10 05:18:05 +00:00
return False
else :
2015-05-14 06:12:06 +00:00
# NAMEIT logger.log(u'AAAAA from' + str(self.season)+'x'+str(self.episode) + ' -' + self.name + ' to ' + str(sql_results[0]['name']))
if sql_results [ 0 ] [ ' name ' ] :
self . name = sql_results [ 0 ] [ ' name ' ]
2014-05-03 09:23:26 +00:00
2014-03-10 05:18:05 +00:00
self . season = season
self . episode = episode
2015-05-14 06:12:06 +00:00
self . absolute_number = sql_results [ 0 ] [ ' absolute_number ' ]
self . description = sql_results [ 0 ] [ ' description ' ]
2014-03-10 05:18:05 +00:00
if not self . description :
2015-05-14 06:12:06 +00:00
self . description = ' '
if sql_results [ 0 ] [ ' subtitles ' ] and sql_results [ 0 ] [ ' subtitles ' ] :
self . subtitles = sql_results [ 0 ] [ ' subtitles ' ] . split ( ' , ' )
self . subtitles_searchcount = sql_results [ 0 ] [ ' subtitles_searchcount ' ]
self . subtitles_lastsearch = sql_results [ 0 ] [ ' subtitles_lastsearch ' ]
self . airdate = datetime . date . fromordinal ( int ( sql_results [ 0 ] [ ' airdate ' ] ) )
# logger.log(u'1 Status changes from ' + str(self.status) + ' to ' + str(sql_results[0]['status']), logger.DEBUG)
if sql_results [ 0 ] [ ' status ' ] is not None :
self . status = int ( sql_results [ 0 ] [ ' status ' ] )
2014-03-10 05:18:05 +00:00
# don't overwrite my location
2015-05-14 06:12:06 +00:00
if sql_results [ 0 ] [ ' location ' ] and sql_results [ 0 ] [ ' location ' ] :
self . location = os . path . normpath ( sql_results [ 0 ] [ ' location ' ] )
if sql_results [ 0 ] [ ' file_size ' ] :
self . file_size = int ( sql_results [ 0 ] [ ' file_size ' ] )
2014-03-10 05:18:05 +00:00
else :
self . file_size = 0
2015-05-14 06:12:06 +00:00
self . indexerid = int ( sql_results [ 0 ] [ ' indexerid ' ] )
self . indexer = int ( sql_results [ 0 ] [ ' indexer ' ] )
2014-03-10 05:18:05 +00:00
2014-08-18 12:40:29 +00:00
sickbeard . scene_numbering . xem_refresh ( self . show . indexerid , self . show . indexer )
2014-05-27 07:44:23 +00:00
2014-06-07 18:36:26 +00:00
try :
2015-05-14 06:12:06 +00:00
self . scene_season = int ( sql_results [ 0 ] [ ' scene_season ' ] )
2014-08-18 12:48:23 +00:00
except :
self . scene_season = 0
try :
2015-05-14 06:12:06 +00:00
self . scene_episode = int ( sql_results [ 0 ] [ ' scene_episode ' ] )
2014-06-07 18:36:26 +00:00
except :
2014-08-18 12:48:23 +00:00
self . scene_episode = 0
2014-05-27 07:44:23 +00:00
2014-06-07 18:36:26 +00:00
try :
2015-05-14 06:12:06 +00:00
self . scene_absolute_number = int ( sql_results [ 0 ] [ ' scene_absolute_number ' ] )
2014-06-07 18:36:26 +00:00
except :
2014-08-18 12:48:23 +00:00
self . scene_absolute_number = 0
if self . scene_absolute_number == 0 :
2014-08-18 12:40:29 +00:00
self . scene_absolute_number = sickbeard . scene_numbering . get_scene_absolute_numbering (
self . show . indexerid ,
self . show . indexer ,
self . absolute_number
)
2014-05-27 07:44:23 +00:00
2014-08-18 12:48:23 +00:00
if self . scene_season == 0 or self . scene_episode == 0 :
self . scene_season , self . scene_episode = sickbeard . scene_numbering . get_scene_numbering (
self . show . indexerid ,
self . show . indexer ,
self . season , self . episode
)
2015-05-14 06:12:06 +00:00
if sql_results [ 0 ] [ ' release_name ' ] is not None :
self . release_name = sql_results [ 0 ] [ ' release_name ' ]
2014-03-10 05:18:05 +00:00
2015-05-14 06:12:06 +00:00
if sql_results [ 0 ] [ ' is_proper ' ] :
self . is_proper = int ( sql_results [ 0 ] [ ' is_proper ' ] )
2014-03-10 05:18:05 +00:00
2015-05-14 06:12:06 +00:00
if sql_results [ 0 ] [ ' version ' ] :
self . version = int ( sql_results [ 0 ] [ ' version ' ] )
2014-07-22 04:53:32 +00:00
2015-05-14 06:12:06 +00:00
if sql_results [ 0 ] [ ' release_group ' ] is not None :
self . release_group = sql_results [ 0 ] [ ' release_group ' ]
2014-07-22 04:53:32 +00:00
2014-03-10 05:18:05 +00:00
self . dirty = False
return True
2016-02-23 20:55:05 +00:00
def loadFromIndexer ( self , season = None , episode = None , cache = True , tvapi = None , cachedSeason = None , update = False ) :
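# fetch the episode record from the indexer (optionally through a cached season or an
# existing api handle), update names, numbering and air date, and refresh the episode
# status based on the air date and whether a media file exists on disk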
2014-03-10 05:18:05 +00:00
2016-09-04 20:00:44 +00:00
if None is season :
2014-03-10 05:18:05 +00:00
season = self . season
2016-09-04 20:00:44 +00:00
if None is episode :
2014-03-10 05:18:05 +00:00
episode = self . episode
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Loading episode details from %s for episode %s x %s ' %
( self . show . indexerid , sickbeard . indexerApi ( self . show . indexer ) . name , season , episode ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
indexer_lang = self . show . lang
try :
2016-09-04 20:00:44 +00:00
if None is cachedSeason :
if None is tvapi :
2014-03-26 19:28:46 +00:00
lINDEXER_API_PARMS = sickbeard . indexerApi ( self . indexer ) . api_params . copy ( )
2014-03-10 05:18:05 +00:00
if not cache :
2014-03-12 05:28:30 +00:00
lINDEXER_API_PARMS [ ' cache ' ] = False
2014-03-10 05:18:05 +00:00
if indexer_lang :
2014-03-12 05:28:30 +00:00
lINDEXER_API_PARMS [ ' language ' ] = indexer_lang
2014-03-10 05:18:05 +00:00
2016-09-04 20:00:44 +00:00
if 0 != self . show . dvdorder :
2014-03-12 05:28:30 +00:00
lINDEXER_API_PARMS [ ' dvdorder ' ] = True
2014-03-10 05:18:05 +00:00
2014-03-26 19:28:46 +00:00
t = sickbeard . indexerApi ( self . indexer ) . indexer ( * * lINDEXER_API_PARMS )
2014-03-10 05:18:05 +00:00
else :
t = tvapi
myEp = t [ self . show . indexerid ] [ season ] [ episode ]
else :
myEp = cachedSeason [ episode ]
2015-06-08 12:47:01 +00:00
except ( sickbeard . indexer_error , IOError ) as e :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s threw up an error: %s ' % ( sickbeard . indexerApi ( self . indexer ) . name , ex ( e ) ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# if the episode is already valid just log it, if not throw it up
if self . name :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s timed out but we have enough info from other sources, allowing the error ' %
sickbeard . indexerApi ( self . indexer ) . name , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return
else :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s timed out, unable to create the episode ' % sickbeard . indexerApi ( self . indexer ) . name ,
2014-03-25 05:57:24 +00:00
logger . ERROR )
2014-03-10 05:18:05 +00:00
return False
2014-03-25 05:57:24 +00:00
except ( sickbeard . indexer_episodenotfound , sickbeard . indexer_seasonnotfound ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' Unable to find the episode on %s ... has it been removed? Should I delete from db? ' %
sickbeard . indexerApi ( self . indexer ) . name , logger . DEBUG )
2014-03-20 13:45:43 +00:00
# if I'm no longer on the Indexers but I once was then delete myself from the DB
2016-09-04 20:00:44 +00:00
if - 1 != self . indexerid :
2014-03-10 05:18:05 +00:00
self . deleteEpisode ( )
return
2016-09-04 20:00:44 +00:00
if not sickbeard . ALLOW_INCOMPLETE_SHOWDATA and None is getattr ( myEp , ' episodename ' , None ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' This episode ( %s - %s x %s ) has no name on %s ' %
( self . show . name , season , episode , sickbeard . indexerApi ( self . indexer ) . name ) )
2014-03-20 18:03:22 +00:00
# if I'm incomplete on the indexer but I once was complete then just delete myself from the DB for now
2016-09-04 20:00:44 +00:00
if - 1 != self . indexerid :
2014-03-10 05:18:05 +00:00
self . deleteEpisode ( )
return False
2016-09-04 20:00:44 +00:00
if None is getattr ( myEp , ' absolute_number ' , None ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' This episode ( %s - %s x %s ) has no absolute number on %s ' %
( self . show . name , season , episode , sickbeard . indexerApi ( self . indexer ) . name ) , logger . DEBUG )
2014-05-26 06:29:22 +00:00
else :
2016-09-04 20:00:44 +00:00
logger . log ( ' %s : The absolute_number for %s x %s is : %s ' %
( self . show . indexerid , season , episode , myEp [ ' absolute_number ' ] ) , logger . DEBUG )
self . absolute_number = int ( myEp [ ' absolute_number ' ] )
2014-05-26 06:29:22 +00:00
2016-09-04 20:00:44 +00:00
self . name = getattr ( myEp , ' episodename ' , ' ' )
2014-03-10 05:18:05 +00:00
self . season = season
self . episode = episode
2014-08-18 12:40:29 +00:00
sickbeard . scene_numbering . xem_refresh ( self . show . indexerid , self . show . indexer )
self . scene_absolute_number = sickbeard . scene_numbering . get_scene_absolute_numbering (
self . show . indexerid ,
self . show . indexer ,
self . absolute_number
)
self . scene_season , self . scene_episode = sickbeard . scene_numbering . get_scene_numbering (
self . show . indexerid ,
self . show . indexer ,
self . season , self . episode
)
2016-09-04 20:00:44 +00:00
self . description = getattr ( myEp , ' overview ' , ' ' )
2014-03-10 05:18:05 +00:00
2014-03-25 05:57:24 +00:00
firstaired = getattr ( myEp , ' firstaired ' , None )
2016-09-04 20:00:44 +00:00
if None is firstaired or firstaired in ' 0000-00-00 ' :
2014-03-21 11:09:18 +00:00
firstaired = str ( datetime . date . fromordinal ( 1 ) )
2016-09-04 20:00:44 +00:00
rawAirdate = [ int ( x ) for x in firstaired . split ( ' - ' ) ]
2014-03-10 05:18:05 +00:00
2016-02-23 20:55:05 +00:00
old_airdate_future = self . airdate == datetime . date . fromordinal ( 1 ) or self . airdate > = datetime . date . today ( )
2014-03-21 11:09:18 +00:00
try :
2014-03-10 05:18:05 +00:00
self . airdate = datetime . date ( rawAirdate [ 0 ] , rawAirdate [ 1 ] , rawAirdate [ 2 ] )
2014-03-27 09:42:00 +00:00
except ( ValueError , IndexError ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' Malformed air date retrieved from %s ( %s - %s x %s ) ' %
( sickbeard . indexerApi ( self . indexer ) . name , self . show . name , season , episode ) , logger . ERROR )
2014-03-20 18:03:22 +00:00
# if I'm incomplete on the indexer but I once was complete then just delete myself from the DB for now
2016-09-04 20:00:44 +00:00
if - 1 != self . indexerid :
2014-03-10 05:18:05 +00:00
self . deleteEpisode ( )
return False
2014-05-27 07:44:23 +00:00
# early conversion to int so that episode doesn't get marked dirty
2014-03-25 05:57:24 +00:00
self . indexerid = getattr ( myEp , ' id ' , None )
2016-09-04 20:00:44 +00:00
if None is self . indexerid :
2016-02-23 20:55:05 +00:00
logger . log ( ' Failed to retrieve ID from %s ' % sickbeard . indexerApi ( self . indexer ) . name , logger . ERROR )
2016-09-04 20:00:44 +00:00
if - 1 != self . indexerid :
2014-03-10 05:18:05 +00:00
self . deleteEpisode ( )
return False
2014-06-10 12:40:11 +00:00
# don't update show status if show dir is missing, unless it's missing on purpose
2014-07-15 02:00:53 +00:00
if not ek . ek ( os . path . isdir ,
self . show . _location ) and not sickbeard . CREATE_MISSING_SHOW_DIRS and not sickbeard . ADD_SHOWS_WO_DIR :
2014-03-25 05:57:24 +00:00
logger . log (
2016-02-23 20:55:05 +00:00
' The show directory is missing, not bothering to change the episode statuses since it \' d probably be invalid ' )
2014-03-10 05:18:05 +00:00
return
2014-05-29 05:40:12 +00:00
if self . location :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Setting status for %s x %s based on status %s and existence of %s ' %
( self . show . indexerid , season , episode , statusStrings [ self . status ] , self . location ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
2014-06-10 12:47:57 +00:00
# if we don't have the file
2014-03-10 05:18:05 +00:00
if not ek . ek ( os . path . isfile , self . location ) :
2016-03-24 18:37:09 +00:00
today = datetime . date . today ( )
2016-09-04 20:00:44 +00:00
delta = datetime . timedelta ( days = 1 )
show_time = network_timezones . parse_date_time ( self . airdate . toordinal ( ) , self . show . airs , self . show . network )
show_length = datetime . timedelta ( minutes = helpers . tryInt ( self . show . runtime , 60 ) )
tz_now = datetime . datetime . now ( network_timezones . sb_timezone )
future_airtime = ( self . airdate > ( today + delta ) or
( not self . airdate < ( today - delta ) and ( ( show_time + show_length ) > tz_now ) ) )
# if this episode hasn't aired yet set the status to UNAIRED
2016-03-24 18:37:09 +00:00
if future_airtime and self . status in [ SKIPPED , UNAIRED , UNKNOWN , WANTED ] :
2016-09-04 20:00:44 +00:00
msg = ' Episode airs in the future, marking it %s '
2014-03-10 05:18:05 +00:00
self . status = UNAIRED
2014-06-10 12:47:57 +00:00
2016-09-04 20:00:44 +00:00
# if there's no airdate then set it to unaired (and respect ignored)
2014-03-10 05:18:05 +00:00
elif self . airdate == datetime . date . fromordinal ( 1 ) :
2016-09-04 20:00:44 +00:00
if IGNORED == self . status :
msg = ' Episode has no air date and marked %s , no change '
2014-03-10 05:18:05 +00:00
else :
2016-09-04 20:00:44 +00:00
msg = ' Episode has no air date, marking it %s '
self . status = UNAIRED
2014-06-10 12:47:57 +00:00
2016-09-04 20:00:44 +00:00
# if the airdate is in the past
2014-03-10 05:18:05 +00:00
else :
2016-09-04 20:00:44 +00:00
if UNAIRED == self . status :
msg = ( ' Episode status %s %s , with air date in the past, marking it ' % (
statusStrings [ self . status ] , ' , ' . join ( [ ( ' is a special ' , ' ' ) [ 0 < self . season ] ,
( ' ' , ' is paused ' ) [ self . show . paused ] ] ) ) + ' %s ' )
self . status = ( SKIPPED , WANTED ) [ 0 < self . season and not self . show . paused ]
# if still UNKNOWN or SKIPPED with the deprecated future airdate method
elif UNKNOWN == self . status or ( SKIPPED == self . status and old_airdate_future ) :
msg = ( ' Episode status %s %s , with air date in the past, marking it ' % (
statusStrings [ self . status ] , ' , ' . join ( [
( ' ' , ' has old future date format ' ) [ SKIPPED == self . status and old_airdate_future ] ,
( ' ' , ' is being updated ' ) [ bool ( update ) ] , ( ' is a special ' , ' ' ) [ 0 < self . season ] ] ) ) + ' %s ' )
self . status = ( SKIPPED , WANTED ) [ update and not self . show . paused and 0 < self . season ]
2014-03-10 05:18:05 +00:00
else :
2016-09-04 20:00:44 +00:00
msg = ' Not touching episode status %s , with air date in the past, because there is no file '
logger . log ( msg % statusStrings [ self . status ] , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# if we have a media file then it's downloaded
2016-08-11 00:00:36 +00:00
elif sickbeard . helpers . has_media_ext ( self . location ) :
2014-03-10 05:18:05 +00:00
# leave propers alone, you have to either post-process them or manually change them back
if self . status not in Quality . SNATCHED_PROPER + Quality . DOWNLOADED + Quality . SNATCHED + [ ARCHIVED ] :
2016-09-04 20:00:44 +00:00
msg = ' (1) Status changes from %s to ' % statusStrings [ self . status ]
self . status = Quality . statusFromNameOrFile ( self . location , anime = self . show . is_anime )
logger . log ( ' %s %s ' % ( msg , statusStrings [ self . status ] ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# unexpected state: we probably shouldn't get here, fall back to UNKNOWN
else :
2016-09-04 20:00:44 +00:00
msg = ' (2) Status changes from %s to ' % statusStrings [ self . status ]
2014-03-10 05:18:05 +00:00
self . status = UNKNOWN
2016-09-04 20:00:44 +00:00
logger . log ( ' %s %s ' % ( msg , statusStrings [ self . status ] ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
def loadFromNFO ( self , location ) :
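# parse the <episodedetails> blocks of the .nfo file next to the media file and copy the
# matching episode's title, numbering, plot and air date; also note whether a .tbn exists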
if not ek . ek ( os . path . isdir , self . show . _location ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : The show directory is missing, not bothering to try loading the episode NFO ' % self . show . indexerid )
2014-03-10 05:18:05 +00:00
return
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Loading episode details from the NFO file associated with %s ' % ( self . show . indexerid , location ) ,
2014-03-25 05:57:24 +00:00
logger . DEBUG )
2014-03-10 05:18:05 +00:00
self . location = location
if self . location != " " :
2016-08-11 00:00:36 +00:00
if UNKNOWN == self . status and sickbeard . helpers . has_media_ext ( self . location ) :
2015-08-21 02:32:27 +00:00
status_quality = Quality . statusFromNameOrFile ( self . location , anime = self . show . is_anime )
2016-02-23 20:55:05 +00:00
logger . log ( ' (3) Status changes from %s to %s ' % ( self . status , status_quality ) , logger . DEBUG )
2015-08-21 02:32:27 +00:00
self . status = status_quality
2014-03-10 05:18:05 +00:00
2016-02-23 20:55:05 +00:00
nfoFile = sickbeard . helpers . replaceExtension ( self . location , ' nfo ' )
logger . log ( ' %s : Using NFO name %s ' % ( self . show . indexerid , nfoFile ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
if ek . ek ( os . path . isfile , nfoFile ) :
try :
showXML = etree . ElementTree ( file = nfoFile )
2015-06-08 12:47:01 +00:00
except ( SyntaxError , ValueError ) as e :
2016-02-23 20:55:05 +00:00
logger . log ( ' Error loading the NFO, backing up the NFO and skipping for now: %s ' % ex ( e ) ,
2014-05-27 07:44:23 +00:00
logger . ERROR ) # TODO: figure out what's wrong and fix it
2014-03-10 05:18:05 +00:00
try :
2016-02-23 20:55:05 +00:00
ek . ek ( os . rename , nfoFile , ' %s .old ' % nfoFile )
2015-06-08 12:47:01 +00:00
except Exception as e :
2014-03-25 05:57:24 +00:00
logger . log (
2016-02-23 20:55:05 +00:00
' Failed to rename your episode \' s NFO file - you need to delete it or fix it: %s ' % ex ( e ) ,
2014-03-25 05:57:24 +00:00
logger . ERROR )
2016-02-23 20:55:05 +00:00
raise exceptions . NoNFOException ( ' Error in NFO format ' )
2014-03-10 05:18:05 +00:00
for epDetails in showXML . getiterator ( ' episodedetails ' ) :
if epDetails . findtext ( ' season ' ) is None or int ( epDetails . findtext ( ' season ' ) ) != self . season or \
2014-03-25 05:57:24 +00:00
epDetails . findtext ( ' episode ' ) is None or int (
epDetails . findtext ( ' episode ' ) ) != self . episode :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : NFO has an <episodedetails> block for a different episode - wanted %s x %s but got %s x %s ' %
( self . show . indexerid , self . season , self . episode , epDetails . findtext ( ' season ' ) ,
epDetails . findtext ( ' episode ' ) ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
continue
if epDetails . findtext ( ' title ' ) is None or epDetails . findtext ( ' aired ' ) is None :
2016-02-23 20:55:05 +00:00
raise exceptions . NoNFOException ( ' Error in NFO format (missing episode title or airdate) ' )
2014-03-10 05:18:05 +00:00
self . name = epDetails . findtext ( ' title ' )
self . episode = int ( epDetails . findtext ( ' episode ' ) )
self . season = int ( epDetails . findtext ( ' season ' ) )
2014-08-18 12:40:29 +00:00
sickbeard . scene_numbering . xem_refresh ( self . show . indexerid , self . show . indexer )
self . scene_absolute_number = sickbeard . scene_numbering . get_scene_absolute_numbering (
self . show . indexerid ,
self . show . indexer ,
self . absolute_number
)
self . scene_season , self . scene_episode = sickbeard . scene_numbering . get_scene_numbering (
self . show . indexerid ,
self . show . indexer ,
self . season , self . episode
)
2014-03-10 05:18:05 +00:00
self . description = epDetails . findtext ( ' plot ' )
if self . description is None :
2016-02-23 20:55:05 +00:00
self . description = ' '
2014-03-10 05:18:05 +00:00
if epDetails . findtext ( ' aired ' ) :
rawAirdate = [ int ( x ) for x in epDetails . findtext ( ' aired ' ) . split ( " - " ) ]
self . airdate = datetime . date ( rawAirdate [ 0 ] , rawAirdate [ 1 ] , rawAirdate [ 2 ] )
else :
self . airdate = datetime . date . fromordinal ( 1 )
self . hasnfo = True
else :
self . hasnfo = False
2016-02-23 20:55:05 +00:00
if ek . ek ( os . path . isfile , sickbeard . helpers . replaceExtension ( nfoFile , ' tbn ' ) ) :
2014-03-10 05:18:05 +00:00
self . hastbn = True
else :
self . hastbn = False
def __str__ ( self ) :
2016-02-23 20:55:05 +00:00
toReturn = ' '
toReturn + = ' %s - %s x %s - %s \n ' % ( self . show . name , self . season , self . episode , self . name )
toReturn + = ' location: %s \n ' % self . location
toReturn + = ' description: %s \n ' % self . description
toReturn + = ' subtitles: %s \n ' % ' , ' . join ( self . subtitles )
toReturn + = ' subtitles_searchcount: %s \n ' % self . subtitles_searchcount
toReturn + = ' subtitles_lastsearch: %s \n ' % self . subtitles_lastsearch
toReturn + = ' airdate: %s ( %s ) \n ' % ( self . airdate . toordinal ( ) , self . airdate )
toReturn + = ' hasnfo: %s \n ' % self . hasnfo
toReturn + = ' hastbn: %s \n ' % self . hastbn
toReturn + = ' status: %s \n ' % self . status
2014-03-10 05:18:05 +00:00
return toReturn
2014-05-14 09:42:08 +00:00
def createMetaFiles ( self ) :
2014-03-10 05:18:05 +00:00
if not ek . ek ( os . path . isdir , self . show . _location ) :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : The show directory is missing, not bothering to try to create metadata ' % self . show . indexerid )
2014-03-10 05:18:05 +00:00
return
2014-05-14 09:42:08 +00:00
self . createNFO ( )
2014-03-10 05:18:05 +00:00
self . createThumbnail ( )
if self . checkForMetaFiles ( ) :
self . saveToDB ( )
2014-05-14 09:42:08 +00:00
def createNFO ( self ) :
2014-03-10 05:18:05 +00:00
result = False
for cur_provider in sickbeard . metadata_provider_dict . values ( ) :
2014-05-14 09:42:08 +00:00
result = cur_provider . create_episode_metadata ( self ) or result
2014-03-10 05:18:05 +00:00
return result
2014-05-14 09:42:08 +00:00
def createThumbnail ( self ) :
2014-03-10 05:18:05 +00:00
result = False
for cur_provider in sickbeard . metadata_provider_dict . values ( ) :
result = cur_provider . create_episode_thumb ( self ) or result
return result
def deleteEpisode ( self ) :
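# drop this episode from its show's in-memory episode dict and from the database, then
# raise EpisodeDeletedException so callers stop working with the stale object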
2016-02-23 20:55:05 +00:00
logger . log ( ' Deleting %s %s x %s from the DB ' % ( self . show . name , self . season , self . episode ) , logger . DEBUG )
2014-03-10 05:18:05 +00:00
# remove myself from the show dictionary
if self . show . getEpisode ( self . season , self . episode , noCreate = True ) == self :
2016-02-23 20:55:05 +00:00
logger . log ( ' Removing myself from my show \' s list ' , logger . DEBUG )
2014-06-11 14:16:24 +00:00
del self . show . episodes [ self . season ] [ self . episode ]
2014-03-10 05:18:05 +00:00
# delete myself from the DB
2016-02-23 20:55:05 +00:00
logger . log ( ' Deleting myself from the database ' , logger . DEBUG )
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
2016-02-23 20:55:05 +00:00
sql = ' DELETE FROM tv_episodes WHERE showid= %s AND indexer= %s AND season= %s AND episode= %s ' % \
( self . show . indexerid , self . show . indexer , self . season , self . episode )
2014-06-21 22:46:59 +00:00
myDB . action ( sql )
2014-03-10 05:18:05 +00:00
raise exceptions . EpisodeDeletedException ( )
2014-03-20 10:24:58 +00:00
def get_sql ( self , forceSave = False ) :
"""
Creates SQL queue for this episode if any of its data has been changed since the last save .
forceSave : If True it will create SQL queue even if no data has been changed since the
last save ( aka if the record is not dirty ) .
"""
if not self . dirty and not forceSave :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Not creating SQL queue - record is not dirty ' % self . show . indexerid , logger . DEBUG )
2014-03-20 10:24:58 +00:00
return
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
rows = myDB . select (
2016-02-23 20:55:05 +00:00
' SELECT episode_id FROM tv_episodes WHERE showid = ? AND indexer=? AND season = ? AND episode = ? ' ,
[ self . show . indexerid , self . show . indexer , self . season , self . episode ] )
2014-06-06 22:16:15 +00:00
epID = None
if rows :
epID = int ( rows [ 0 ] [ ' episode_id ' ] )
2016-02-01 19:37:32 +00:00
self . dirty = False
2014-06-06 22:16:15 +00:00
if epID :
# use a custom update method to get the data into the DB for existing records.
return [
2016-02-23 20:55:05 +00:00
' UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, '
' subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, '
' location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, '
' absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ? ' ,
2014-06-06 22:16:15 +00:00
[ self . indexerid , self . indexer , self . name , self . description , " , " . join ( [ sub for sub in self . subtitles ] ) ,
2014-06-07 15:33:18 +00:00
self . subtitles_searchcount , self . subtitles_lastsearch , self . airdate . toordinal ( ) , self . hasnfo ,
self . hastbn ,
self . status , self . location , self . file_size , self . release_name , self . is_proper , self . show . indexerid ,
2014-07-22 04:53:32 +00:00
self . season , self . episode , self . absolute_number , self . version , self . release_group , epID ] ]
2014-06-06 22:16:15 +00:00
else :
# use a custom insert method to get the data into the DB.
return [
2016-02-23 20:55:05 +00:00
' INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, '
' subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, '
' release_name, is_proper, showid, season, episode, absolute_number, version, release_group) VALUES '
' ((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?) '
' ,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?); ' ,
2014-06-07 15:33:18 +00:00
[ self . show . indexerid , self . season , self . episode , self . indexerid , self . indexer , self . name ,
self . description ,
2014-06-06 22:16:15 +00:00
" , " . join ( [ sub for sub in self . subtitles ] ) , self . subtitles_searchcount , self . subtitles_lastsearch ,
self . airdate . toordinal ( ) , self . hasnfo , self . hastbn , self . status , self . location , self . file_size ,
2014-06-07 15:33:18 +00:00
self . release_name , self . is_proper , self . show . indexerid , self . season , self . episode ,
2014-07-22 04:53:32 +00:00
self . absolute_number , self . version , self . release_group ] ]
2014-03-20 10:24:58 +00:00
2014-03-10 05:18:05 +00:00
def saveToDB ( self , forceSave = False ) :
"""
Saves this episode to the database if any of its data has been changed since the last save .
2015-05-14 06:12:06 +00:00
2014-03-10 05:18:05 +00:00
forceSave : If True it will save to the database even if no data has been changed since the
last save ( aka if the record is not dirty ) .
"""
if not self . dirty and not forceSave :
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Not saving episode to db - record is not dirty ' % self . show . indexerid , logger . DEBUG )
2014-03-10 05:18:05 +00:00
return
2016-02-23 20:55:05 +00:00
logger . log ( ' %s : Saving episode details to database ' % self . show . indexerid , logger . DEBUG )
logger . log ( ' STATUS IS %s ' % statusStrings [ self . status ] , logger . DEBUG )
newValueDict = { ' indexerid ' : self . indexerid ,
' indexer ' : self . indexer ,
' name ' : self . name ,
' description ' : self . description ,
' subtitles ' : ' , ' . join ( [ sub for sub in self . subtitles ] ) ,
' subtitles_searchcount ' : self . subtitles_searchcount ,
' subtitles_lastsearch ' : self . subtitles_lastsearch ,
' airdate ' : self . airdate . toordinal ( ) ,
' hasnfo ' : self . hasnfo ,
' hastbn ' : self . hastbn ,
' status ' : self . status ,
' location ' : self . location ,
' file_size ' : self . file_size ,
' release_name ' : self . release_name ,
' is_proper ' : self . is_proper ,
' absolute_number ' : self . absolute_number ,
' version ' : self . version ,
' release_group ' : self . release_group
2014-05-27 07:44:23 +00:00
}
2016-02-23 20:55:05 +00:00
controlValueDict = { ' showid ' : self . show . indexerid ,
' season ' : self . season ,
' episode ' : self . episode }
2014-03-10 05:18:05 +00:00
# use a custom update/insert method to get the data into the DB
2014-06-21 22:46:59 +00:00
myDB = db . DBConnection ( )
2016-02-23 20:55:05 +00:00
myDB . upsert ( ' tv_episodes ' , newValueDict , controlValueDict )
2016-02-01 19:37:32 +00:00
self . dirty = False
2014-03-10 05:18:05 +00:00
def fullPath ( self ) :
2014-03-20 18:03:22 +00:00
if not self . location :
2014-03-10 05:18:05 +00:00
return None
else :
return ek . ek ( os . path . join , self . show . location , self . location )
2014-05-31 10:35:11 +00:00
def createStrings ( self , pattern = None ) :
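# build candidate naming strings for this episode, either from the single pattern given
# or from the default list of season/episode patterns below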
patterns = [
' % S.N.S % SE %0E ' ,
' % S.N.S % 0SE %E ' ,
' % S.N.S % SE %E ' ,
' % S.N.S % 0SE %0E ' ,
' % SN S % SE %0E ' ,
' % SN S % 0SE %E ' ,
' % SN S % SE %E ' ,
' % SN S % 0SE %0E '
]
strings = [ ]
if not pattern :
for p in patterns :
strings + = [ self . _format_pattern ( p ) ]
return strings
return self . _format_pattern ( pattern )
2014-03-10 05:18:05 +00:00
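
    # Illustrative sketch (not part of the original code): for a hypothetical episode
    # S02E03 of 'Show Name', the patterns above expand roughly to
    #   '%S.N.S%0SE%0E' -> 'Show.Name.S02E03'
    #   '%SN S%SE%0E'   -> 'Show Name S2E03'
    # so createStrings() yields one candidate name per naming style when no pattern is given.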

    def prettyName(self):
        """
        Returns the name of this episode in a "pretty" human-readable format. Used for logging
        and notifications and such.

        Returns: A string representing the episode's name and season/ep numbers
        """
        if self.show.anime and not self.show.scene:
            return self._format_pattern('%SN - %AB - %EN')
        elif self.show.air_by_date:
            return self._format_pattern('%SN - %AD - %EN')

        return self._format_pattern('%SN - %Sx%0E - %EN')
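
    # Illustrative sketch (not part of the original code): a regular show comes out like
    # 'Show Name - 2x03 - Ep Name', an air-by-date show like 'Show Name - 2015 05 14 - Ep Name',
    # and a non-scene anime show like 'Show Name - 015 - Ep Name' (absolute numbering).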

    def _ep_name(self):
        """
        Returns the name of the episode to use during renaming. Combines the names of related episodes.
        Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name"
            "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name"
        """
        multiNameRegex = '(.*) \(\d{1,2}\)'

        self.relatedEps = sorted(self.relatedEps, key=lambda x: x.episode)

        if len(self.relatedEps) == 0:
            goodName = self.name
        else:
            goodName = ''

            singleName = True
            curGoodName = None

            for curName in [self.name] + [x.name for x in self.relatedEps]:
                match = re.match(multiNameRegex, curName)
                if not match:
                    singleName = False
                    break

                if curGoodName is None:
                    curGoodName = match.group(1)
                elif curGoodName != match.group(1):
                    singleName = False
                    break

            if singleName:
                goodName = curGoodName
            else:
                goodName = self.name
                for relEp in self.relatedEps:
                    goodName += ' & ' + relEp.name

        return goodName
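
    # Illustrative sketch (not part of the original code): related episodes named
    # 'Ep Name (1)' and 'Ep Name (2)' collapse to a single 'Ep Name', while unrelated
    # names such as 'Pilot' and 'Second Ep' are joined as 'Pilot & Second Ep'.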

    def _replace_map(self):
        """
        Generates a replacement map for this episode which maps all possible custom naming patterns to the correct
        value for this episode.

        Returns: A dict with patterns as the keys and their replacement values as the values.
        """
        ep_name = self._ep_name()

        def dot(name):
            return helpers.sanitizeSceneName(name)

        def us(name):
            return re.sub('[ -]', '_', name)

        def release_name(name, is_anime=False):
            if name:
                name = helpers.remove_non_release_groups(name, is_anime)
            return name

        def release_group(show, name):
            if name:
                name = helpers.remove_non_release_groups(name, show.is_anime)
            else:
                return ''

            try:
                np = NameParser(name, showObj=show, naming_pattern=True)
                parse_result = np.parse(name)
            except (InvalidNameException, InvalidShowException) as e:
                logger.log('Unable to parse release_group: %s' % ex(e), logger.DEBUG)
                return ''

            if not parse_result.release_group:
                return ''
            return parse_result.release_group

        epStatus, epQual = Quality.splitCompositeStatus(self.status)  # @UnusedVariable

        if sickbeard.NAMING_STRIP_YEAR:
            show_name = re.sub('\(\d+\)$', '', self.show.name).rstrip()
        else:
            show_name = self.show.name

        return {
            '%SN': show_name,
            '%S.N': dot(show_name),
            '%S_N': us(show_name),
            '%EN': ep_name,
            '%E.N': dot(ep_name),
            '%E_N': us(ep_name),
            '%QN': Quality.qualityStrings[epQual],
            '%Q.N': dot(Quality.qualityStrings[epQual]),
            '%Q_N': us(Quality.qualityStrings[epQual]),
            '%S': str(self.season),
            '%0S': '%02d' % self.season,
            '%E': str(self.episode),
            '%0E': '%02d' % self.episode,
            '%XS': str(self.scene_season),
            '%0XS': '%02d' % self.scene_season,
            '%XE': str(self.scene_episode),
            '%0XE': '%02d' % self.scene_episode,
            '%AB': '%(#)03d' % {'#': self.absolute_number},
            '%XAB': '%(#)03d' % {'#': self.scene_absolute_number},
            '%RN': release_name(self.release_name, self.show.is_anime),
            '%RG': release_group(self.show, self.release_name),
            '%AD': str(self.airdate).replace('-', ' '),
            '%A.D': str(self.airdate).replace('-', '.'),
            '%A_D': us(str(self.airdate)),
            '%A-D': str(self.airdate),
            '%Y': str(self.airdate.year),
            '%M': str(self.airdate.month),
            '%D': str(self.airdate.day),
            '%0M': '%02d' % self.airdate.month,
            '%0D': '%02d' % self.airdate.day,
            '%RT': 'PROPER' if self.is_proper else '',
            '%V': 'v%s' % self.version if self.show.is_anime and self.version > 1 else '',
        }
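
    # Illustrative sketch (not part of the original code): for a hypothetical episode
    # S02E03 of 'Show Name' with absolute number 3, the map holds entries roughly like
    #   '%SN' -> 'Show Name', '%S.N' -> 'Show.Name', '%S' -> '2', '%0S' -> '02',
    #   '%E' -> '3', '%0E' -> '03', '%AB' -> '003'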

    def _format_string(self, pattern, replace_map):
        """
        Replaces all template strings with the correct value
        """
        result_name = pattern

        # do the replacements
        for cur_replacement in sorted(replace_map.keys(), reverse=True):
            result_name = result_name.replace(cur_replacement, helpers.sanitizeFileName(replace_map[cur_replacement]))
            result_name = result_name.replace(cur_replacement.lower(),
                                              helpers.sanitizeFileName(replace_map[cur_replacement].lower()))

        return result_name
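
    # Illustrative note (not part of the original code): reverse-sorting the keys means a
    # token that is a prefix of another (e.g. '%S' vs '%SN') is replaced after the longer
    # one, and every token also has a lower-case form, so '%s.n.s%0se%0e' becomes
    # 'show.name.s02e03' for the hypothetical episode above.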

    def _format_pattern(self, pattern=None, multi=None, anime_type=None):
        """
        Manipulates an episode naming pattern and then fills the template in
        """
        if pattern is None:
            pattern = sickbeard.NAMING_PATTERN

        if multi is None:
            multi = sickbeard.NAMING_MULTI_EP

        if anime_type is None:
            anime_type = sickbeard.NAMING_ANIME

        replace_map = self._replace_map()

        result_name = pattern

        # if there's no release group then replace it with a reasonable facsimile
        if not replace_map['%RN']:
            if self.show.air_by_date or self.show.sports:
                result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-SickGear')
                result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-SickGear')
            elif anime_type != 3:
                result_name = result_name.replace('%RN', '%S.N.%AB.%E.N-SickGear')
                result_name = result_name.replace('%rn', '%s.n.%ab.%e.n-SickGear')
            else:
                result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-SickGear')
                result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-SickGear')

            result_name = result_name.replace('%RG', 'SickGear')
            result_name = result_name.replace('%rg', 'SickGear')
            logger.log('Episode has no release name, replacing it with a generic one: %s' % result_name, logger.DEBUG)

        if not replace_map['%RT']:
            result_name = re.sub('([ _.-]*)%RT([ _.-]*)', r'\2', result_name)

        # split off ep name part only
        name_groups = re.split(r'[\\/]', result_name)

        # figure out the double-ep numbering style for each group, if applicable
        for cur_name_group in name_groups:

            season_format = sep = ep_sep = ep_format = None

            season_ep_regex = '''
                                (?P<pre_sep>[ _.-]*)
                                ((?:s(?:eason|eries)?\s*)?%0?S(?![._]?N))
                                (.*?)
                                (%0?E(?![._]?N))
                                (?P<post_sep>[ _.-]*)
                              '''
            ep_only_regex = '(E?%0?E(?![._]?N))'

            # try the normal way
            season_ep_match = re.search(season_ep_regex, cur_name_group, re.I | re.X)
            ep_only_match = re.search(ep_only_regex, cur_name_group, re.I | re.X)

            # if we have a season and episode then collect the necessary data
            if season_ep_match:
                season_format = season_ep_match.group(2)
                ep_sep = season_ep_match.group(3)
                ep_format = season_ep_match.group(4)
                sep = season_ep_match.group('pre_sep')
                if not sep:
                    sep = season_ep_match.group('post_sep')
                if not sep:
                    sep = ' '

                # force 2-3-4 format if they chose to extend
                if multi in (NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED):
                    ep_sep = '-'

                regex_used = season_ep_regex

            # if there's no season then there's not much choice so we'll just force them to use 03-04-05 style
            elif ep_only_match:
                season_format = ''
                ep_sep = '-'
                ep_format = ep_only_match.group(1)
                sep = ''
                regex_used = ep_only_regex

            else:
                continue

            # we need at least this much info to continue
            if not ep_sep or not ep_format:
                continue

            # start with the ep string, eg. E03
            ep_string = self._format_string(ep_format.upper(), replace_map)
            for other_ep in self.relatedEps:

                # for limited extend we only append the last ep
                if multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED) \
                        and other_ep != self.relatedEps[-1]:
                    continue

                elif multi == NAMING_DUPLICATE:
                    # add " - S01"
                    ep_string += sep + season_format

                elif multi == NAMING_SEPARATED_REPEAT:
                    ep_string += sep

                # add "E04"
                ep_string += ep_sep

                if multi == NAMING_LIMITED_EXTEND_E_PREFIXED:
                    ep_string += 'E'

                ep_string += other_ep._format_string(ep_format.upper(), other_ep._replace_map())

            if anime_type != 3:
                if self.absolute_number == 0:
                    curAbsolute_number = self.episode
                else:
                    curAbsolute_number = self.absolute_number

                if self.season != 0:  # don't set absolute numbers if we are on specials!
                    if anime_type == 1:  # this crazy person wants both! (note: +=)
                        ep_string += sep + '%(#)03d' % {'#': curAbsolute_number}
                    elif anime_type == 2:  # total anime freak only needs the absolute number! (note: =)
                        ep_string = '%(#)03d' % {'#': curAbsolute_number}

                    for relEp in self.relatedEps:
                        if relEp.absolute_number != 0:
                            ep_string += '-' + '%(#)03d' % {'#': relEp.absolute_number}
                        else:
                            ep_string += '-' + '%(#)03d' % {'#': relEp.episode}

            regex_replacement = None
            if anime_type == 2:
                regex_replacement = r'\g<pre_sep>' + ep_string + r'\g<post_sep>'
            elif season_ep_match:
                regex_replacement = r'\g<pre_sep>\g<2>\g<3>' + ep_string + r'\g<post_sep>'
            elif ep_only_match:
                regex_replacement = ep_string

            if regex_replacement:
                # fill out the template for this piece and then insert this piece into the actual pattern
                cur_name_group_result = re.sub('(?i)(?x)' + regex_used, regex_replacement, cur_name_group)
                # cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
                # logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG)
                result_name = result_name.replace(cur_name_group, cur_name_group_result)

        result_name = self._format_string(result_name, replace_map)

        logger.log('formatting pattern: %s -> %s' % (pattern, result_name), logger.DEBUG)

        return result_name
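
    # Illustrative sketch (not part of the original code): with an extended multi-episode
    # style, a hypothetical pattern '%SN - S%0SE%0E - %EN' covering episodes 3 and 4 of
    # season 2 comes out roughly as 'Show Name - S02E03-04 - Ep Name'; anime_type 2 swaps
    # the SxxEyy block for absolute numbers, e.g. 'Show Name - 015-016 - Ep Name'.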

    def proper_path(self):
        """
        Figures out the path where this episode SHOULD live according to the renaming rules, relative from the show dir
        """
        anime_type = sickbeard.NAMING_ANIME
        if not self.show.is_anime:
            anime_type = 3

        result = self.formatted_filename(anime_type=anime_type)

        # if they want us to flatten it and we're allowed to flatten it then we will
        if self.show.flatten_folders and not sickbeard.NAMING_FORCE_FOLDERS:
            return result

        # if not we append the folder on and use that
        else:
            result = ek.ek(os.path.join, self.formatted_dir(), result)
            return result
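
    # Illustrative sketch (not part of the original code): with folders kept this returns a
    # show-relative path such as 'Season 02/Show Name - S02E03 - Ep Name', while a flattened
    # show (with NAMING_FORCE_FOLDERS off) returns just the filename part.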

    def formatted_dir(self, pattern=None, multi=None):
        """
        Just the folder name of the episode
        """
        if pattern is None:
            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
            if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps:
                pattern = sickbeard.NAMING_ABD_PATTERN
            elif self.show.sports and sickbeard.NAMING_CUSTOM_SPORTS and not self.relatedEps:
                pattern = sickbeard.NAMING_SPORTS_PATTERN
            elif self.show.anime and sickbeard.NAMING_CUSTOM_ANIME:
                pattern = sickbeard.NAMING_ANIME_PATTERN
            else:
                pattern = sickbeard.NAMING_PATTERN

        # split off the dirs only, if they exist
        name_groups = re.split(r'[\\/]', pattern)

        if len(name_groups) == 1:
            return ''
        else:
            return self._format_pattern(os.sep.join(name_groups[:-1]), multi)
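
    # Illustrative sketch (not part of the original code): a hypothetical pattern
    # 'Season %0S/%SN - S%0SE%0E - %EN' gives 'Season 02' here; a pattern with no path
    # separator gives '' and the episode file sits directly in the show directory.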

    def formatted_filename(self, pattern=None, multi=None, anime_type=None):
        """
        Just the filename of the episode, formatted based on the naming settings
        """
        if pattern is None:
            # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep
            if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps:
                pattern = sickbeard.NAMING_ABD_PATTERN
            elif self.show.sports and sickbeard.NAMING_CUSTOM_SPORTS and not self.relatedEps:
                pattern = sickbeard.NAMING_SPORTS_PATTERN
            elif self.show.anime and sickbeard.NAMING_CUSTOM_ANIME:
                pattern = sickbeard.NAMING_ANIME_PATTERN
            else:
                pattern = sickbeard.NAMING_PATTERN

        # split off the dirs only, if they exist
        name_groups = re.split(r'[\\/]', pattern)

        return self._format_pattern(name_groups[-1], multi, anime_type)
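
    # Illustrative sketch (not part of the original code): the counterpart of formatted_dir(),
    # so the same hypothetical pattern gives the filename part 'Show Name - S02E03 - Ep Name'.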

    def rename(self):
        """
        Renames an episode file and all related files to the location and filename as specified
        in the naming settings.
        """
        if not ek.ek(os.path.isfile, self.location):
            logger.log('Can\'t perform rename on %s when it doesn\'t exist, skipping' % self.location, logger.WARNING)
            return

        proper_path = self.proper_path()
        absolute_proper_path = ek.ek(os.path.join, self.show.location, proper_path)
        absolute_current_path_no_ext, file_ext = ek.ek(os.path.splitext, self.location)
        absolute_current_path_no_ext_length = len(absolute_current_path_no_ext)

        related_subs = []

        current_path = absolute_current_path_no_ext

        if absolute_current_path_no_ext.startswith(self.show.location):
            current_path = absolute_current_path_no_ext[len(self.show.location):]

        logger.log('Renaming/moving episode from the base path %s to %s' % (self.location, absolute_proper_path),
                   logger.DEBUG)

        # if it's already named correctly then don't do anything
        if proper_path == current_path:
            logger.log('%s: File %s is already named correctly, skipping' % (self.indexerid, self.location),
                       logger.DEBUG)
            return

        related_files = postProcessor.PostProcessor(self.location).list_associated_files(
            self.location, base_name_only=True)

        if self.show.subtitles and sickbeard.SUBTITLES_DIR != '':
            related_subs = postProcessor.PostProcessor(self.location).list_associated_files(sickbeard.SUBTITLES_DIR,
                                                                                            subtitles_only=True)
            absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename())

        logger.log('Files associated to %s: %s' % (self.location, related_files), logger.DEBUG)

        # move the ep file
        result = helpers.rename_ep_file(self.location, absolute_proper_path, absolute_current_path_no_ext_length)

        # move related files
        for cur_related_file in related_files:
            cur_result = helpers.rename_ep_file(cur_related_file, absolute_proper_path,
                                                absolute_current_path_no_ext_length)
            if not cur_result:
                logger.log('%s: Unable to rename file %s' % (self.indexerid, cur_related_file), logger.ERROR)

        for cur_related_sub in related_subs:
            absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename())
            cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path,
                                                absolute_current_path_no_ext_length)
            if not cur_result:
                logger.log('%s: Unable to rename file %s' % (self.indexerid, cur_related_sub), logger.ERROR)

        # save the ep
        with self.lock:
            if result:
                self.location = absolute_proper_path + file_ext
                for relEp in self.relatedEps:
                    relEp.location = absolute_proper_path + file_ext

        # in case something changed with the metadata just do a quick check
        for curEp in [self] + self.relatedEps:
            curEp.checkForMetaFiles()

        # save any changes to the database
        sql_l = []
        with self.lock:
            for relEp in [self] + self.relatedEps:
                result = relEp.get_sql()
                if None is not result:
                    sql_l.append(result)

        if 0 < len(sql_l):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

    def airdateModifyStamp(self):
        """
        Make the modify date and time of a file reflect the show air date and time.
        Note: Also called from postProcessor
        """
        if not datetime.date == type(self.airdate) or 1 == self.airdate.year:
            logger.log('%s: Did not change modify date of %s because the episode date is unaired or invalid'
                       % (self.show.indexerid, os.path.basename(self.location)), logger.DEBUG)
            return

        hr = m = 0
        airs = re.search('.*?(\d{1,2})(?::\s*?(\d{2}))?\s*(pm)?', self.show.airs, re.I)
        if airs:
            hr = int(airs.group(1))
            hr = (12 + hr, hr)[None is airs.group(3)]
            hr = (hr, hr - 12)[0 == hr % 12 and 0 != hr]
            m = int((airs.group(2), m)[None is airs.group(2)])
        airtime = datetime.time(hr, m)

        airdatetime = datetime.datetime.combine(self.airdate, airtime)

        filemtime = datetime.datetime.fromtimestamp(os.path.getmtime(self.location))

        if filemtime != airdatetime:
            import time

            airdatetime = airdatetime.timetuple()
            if helpers.touchFile(self.location, time.mktime(airdatetime)):
                logger.log('%s: Changed modify date of %s to show air date %s'
                           % (self.show.indexerid, os.path.basename(self.location),
                              time.strftime('%b %d, %Y (%H:%M)', airdatetime)))
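
    # Illustrative sketch (not part of the original code): an airs string such as
    # 'Thursday 8:00 PM' parses to hour 8 with pm set, giving datetime.time(20, 0); a bare
    # '10 AM' style string gives datetime.time(10, 0); anything unparseable falls back to
    # the 00:00 default from hr = m = 0.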

    def __getstate__(self):
        d = dict(self.__dict__)
        del d['lock']
        return d

    def __setstate__(self, d):
        d['lock'] = threading.Lock()
        self.__dict__.update(d)
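
    # Illustrative note (not part of the original code): threading.Lock objects cannot be
    # pickled, so __getstate__ drops the lock before serialisation and __setstate__ creates
    # a fresh one when the episode object is restored, e.g. via pickle.loads(pickle.dumps(ep)).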