Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-02 17:33:37 +00:00
Added black and white lists of release groups for anime shows.
Fixed the anime show list splitter. Added a check of anime shows against providers so that we don't perform searches with a provider that doesn't support the genre of show being searched.
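In short, this commit adds two gates to the search path: providers flagged as anime-only are skipped for non-anime shows, and candidate results for an anime show must pass that show's black/white list of release groups before they can be picked. A condensed sketch of that flow, using the names introduced in this diff (the show, provider, and results objects here are placeholders):

```python
from sickbeard.blackandwhitelist import BlackAndWhiteList

def filter_results_for_show(show, provider, results):
    # Skip providers that only index anime when the show is not an anime.
    if provider.anime_only and not show.is_anime:
        return []

    # Build the show's black and white list (global keywords and release groups).
    bwl = BlackAndWhiteList(show.indexerid)

    accepted = []
    for result in results:
        # is_valid() passes only when no blacklist keyword matches and,
        # if a whitelist is set, at least one whitelist keyword matches.
        if bwl.is_valid(result):
            accepted.append(result)
    return accepted
```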
This commit is contained in: parent c217a4fc57, commit 34da3f53d8
19 changed files with 819 additions and 108 deletions
|
@ -176,6 +176,25 @@
|
|||
#if $anyQualities + $bestQualities
|
||||
<tr><td class="showLegend">Archive First Match: </td><td><img src="$sbRoot/images/#if int($show.archive_firstmatch) == 1 then "yes16.png\" alt=\"Y" else "no16.png\" alt=\"N"#" width="16" height="16" /></td></tr>
|
||||
#end if
|
||||
|
||||
#if $bwl.get_white_keywords_for("gloabl"):
|
||||
<tr><td class="showLegend">Whitelist: </td><td>#echo ', '.join($bwl.get_white_keywords_for("gloabl"))#</td></tr>
|
||||
#end if
|
||||
#if $bwl.get_black_keywords_for("gloabl"):
|
||||
<tr><td class="showLegend">Blacklist: </td><td>#echo ', '.join($bwl.get_black_keywords_for("gloabl"))#</td></tr>
|
||||
#end if
|
||||
#if $bwl.get_white_keywords_for("release_group"):
|
||||
<tr>
|
||||
<td class="showLegend">Wanted Group#if len($bwl.get_white_keywords_for("release_group"))>1 then "s" else ""#:</td>
|
||||
<td>#echo ', '.join($bwl.get_white_keywords_for("release_group"))#</td>
|
||||
</tr>
|
||||
#end if
|
||||
#if $bwl.get_black_keywords_for("release_group"):
|
||||
<tr>
|
||||
<td class="showLegend">Unwanted Group#if len($bwl.get_black_keywords_for("release_group"))>1 then "s" else ""#:</td>
|
||||
<td>#echo ', '.join($bwl.get_black_keywords_for("release_group"))#</td>
|
||||
</tr>
|
||||
#end if
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#import sickbeard
|
||||
#import lib.adba as adba
|
||||
#from sickbeard import common
|
||||
#from sickbeard import exceptions
|
||||
#from sickbeard import scene_exceptions
|
||||
|
@ -139,6 +140,53 @@ Results without one of these words in the title will be filtered out <br />
|
|||
Separate words with a comma, e.g. "word1,word2,word3"
|
||||
<br /><br />
|
||||
|
||||
#if $show.is_anime
|
||||
<p>
|
||||
Release Groups:
|
||||
</p>
|
||||
<input type="text" id="addToPoolText"/>
|
||||
<input type="button" value="Add to White" id="addToWhite">
|
||||
<input type="button" value="Add to Black" id="addToBlack"><br/>
|
||||
<div class="blackwhiteliste white">
|
||||
<span>White:</span>
|
||||
<select multiple id="white">
|
||||
#for $keyword in $whitelist:
|
||||
<option value="$keyword">$keyword</option>
|
||||
#end for
|
||||
</select>
|
||||
<br/>
|
||||
<input id="removeW" value="Remove >>" type="button"/>
|
||||
</div>
|
||||
|
||||
<div class="blackwhiteliste pool">
|
||||
<span>Pool (Name|Rating|Subed Ep):</span>
|
||||
<select multiple id="pool">
|
||||
#for $group in $groups
|
||||
#if $group not in $whitelist and $group['name'] not in $blacklist:
|
||||
<option value="$group['name']">$group['name'] | $group['rating'] | $group['range']</option>
|
||||
#end if
|
||||
#end for
|
||||
</select>
|
||||
<br/>
|
||||
<input id="addW" value="<< Add" type="button"/>
|
||||
<input id="addB" value="Add >>" type="button"/>
|
||||
</div>
|
||||
|
||||
<div class="blackwhiteliste black">
|
||||
<span>Black:</span>
|
||||
<select multiple id="black">
|
||||
#for $keyword in $blacklist:
|
||||
<option value="$keyword">$keyword</option>
|
||||
#end for
|
||||
</select>
|
||||
<br/>
|
||||
<input id="removeB" value="<< Remove" type="button"/>
|
||||
</div>
|
||||
<br style="clear:both;"/>
|
||||
#end if
|
||||
<input type="hidden" name="whitelist" id="whitelist"/>
|
||||
<input type="hidden" name="blacklist" id="blacklist"/>
|
||||
|
||||
<input type="submit" id="submit" value="Submit" class="btn btn-primary" />
|
||||
</form>
|
||||
|
||||
|
|
|
@ -98,7 +98,7 @@
|
|||
\$(this).remove();
|
||||
});
|
||||
|
||||
\$("#showListTable:has(tbody tr)").tablesorter({
|
||||
\$("#showListTableShows:has(tbody tr)").tablesorter({
|
||||
|
||||
sortList: [[6,1],[2,0]],
|
||||
textExtraction: {
|
||||
|
@ -119,6 +119,26 @@
|
|||
}
|
||||
});
|
||||
|
||||
\$("#showListTableAnime:has(tbody tr)").tablesorter({
|
||||
|
||||
sortList: [[6,1],[2,0]],
|
||||
textExtraction: {
|
||||
0: function(node) { return \$(node).find("span").text().toLowerCase(); },
|
||||
#if ( $layout != 'simple'):
|
||||
3: function(node) { return \$(node).find("img").attr("alt"); },
|
||||
#end if
|
||||
4: function(node) { return \$(node).find("span").text(); },
|
||||
6: function(node) { return \$(node).find("img").attr("alt"); }
|
||||
},
|
||||
widgets: ['saveSort', 'zebra'],
|
||||
headers: {
|
||||
0: { sorter: 'cDate' },
|
||||
2: { sorter: 'loadingNames' },
|
||||
3: { sorter: 'network' },
|
||||
4: { sorter: 'quality' },
|
||||
5: { sorter: 'eps' },
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
//-->
|
||||
|
@ -138,7 +158,14 @@
|
|||
</span>
|
||||
</div>
|
||||
|
||||
<table id="showListTable" class="sickbeardTable tablesorter" cellspacing="1" border="0" cellpadding="0">
|
||||
#for $curShowlist in $showlists:
|
||||
#set $curListType = $curShowlist[0]
|
||||
#set $myShowList = $list($curShowlist[1])
|
||||
#if $curListType == "Anime":
|
||||
<h2>Anime List</h2>
|
||||
#end if
|
||||
|
||||
<table id="showListTable$curListType" class="sickbeardTable tablesorter" cellspacing="1" border="0" cellpadding="0">
|
||||
|
||||
<thead><tr><th class="nowrap">Next Ep</th>#if $layout=="poster" then "<th>Poster</th>" else "<th style='display: none;'></th>"#<th>Show</th><th>Network</th><th>Quality</th><th>Downloads</th><th>Active</th><th>Status</th></tr></thead>
|
||||
<tfoot>
|
||||
|
@ -173,7 +200,6 @@
|
|||
</tr>
|
||||
#end for
|
||||
|
||||
#set $myShowList = $list($sickbeard.showList)
|
||||
$myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
||||
#for $curShow in $myShowList:
|
||||
#set $curEp = $curShow.nextEpisode()
|
||||
|
@ -258,7 +284,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
|||
//-->
|
||||
</script>
|
||||
</td>
|
||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.paused) == 0 and $curShow.status != "Ended" then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.paused) == 0 and "Ended" not in $curShow.status then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||
<td align="center" style="color: #555555; font-weight: bold;">$curShow.status</td>
|
||||
</tr>
|
||||
|
||||
|
@ -266,5 +292,8 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
|||
#end for
|
||||
</tbody>
|
||||
</table>
|
||||
#end for
|
||||
|
||||
<script type="text/javascript" src="$sbRoot/js/tableClick.js"></script>
|
||||
|
||||
#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
|
||||
|
|
|
@ -82,6 +82,8 @@
|
|||
<!-- <th>Lang</th>//-->
|
||||
<th class="nowrap" style="text-align: left;">Show Name</th>
|
||||
<th>Quality</th>
|
||||
<th>Sports</th>
|
||||
<th>Anime</th>
|
||||
<th>Flat Folders</th>
|
||||
<th>Paused</th>
|
||||
<th>Status</th>
|
||||
|
@ -141,7 +143,9 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
|
|||
<td align="center"><span class="quality $qualityPresetStrings[$curShow.quality]">$qualityPresetStrings[$curShow.quality]</span></td>
|
||||
#else:
|
||||
<td align="center"><span class="quality Custom">Custom</span></td>
|
||||
#end if
|
||||
#end if
|
||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.is_sports) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.is_anime) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.flatten_folders) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||
<td align="center"><img src="$sbRoot/images/#if int($curShow.paused) == 1 then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" /></td>
|
||||
<td align="center">$curShow.status</td>
|
||||
|
|
|
@ -93,6 +93,17 @@
|
|||
</div><br />
|
||||
</div>
|
||||
|
||||
<div class="optionWrapper">
|
||||
<span class="selectTitle">Anime</span>
|
||||
<div class="selectChoices">
|
||||
<select id="edit_anime" name="anime">
|
||||
<option value="keep">< keep ></option>
|
||||
<option value="enable" #if $anime_value then "selected=\"selected\"" else ""#>enable</option>
|
||||
<option value="disable" #if $anime_value == False then "selected=\"selected\"" else ""#>disable</option>
|
||||
</select>
|
||||
</div><br />
|
||||
</div>
|
||||
|
||||
<div class="optionWrapper">
|
||||
<span class="selectTitle">Subtitles<span class="separator"></span></span>
|
||||
<div class="selectChoices">
|
||||
|
|
210
sickbeard/blackandwhitelist.py
Normal file
|
@ -0,0 +1,210 @@
|
|||
# Author: Dennis Lutter <lad1337@gmail.com>
|
||||
# URL: http://code.google.com/p/sickbeard/
|
||||
#
|
||||
# This file is part of Sick Beard.
|
||||
#
|
||||
# Sick Beard is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Sick Beard is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from sickbeard import db, logger
|
||||
|
||||
class BlackAndWhiteList(object):
|
||||
_tableBlack = "blacklist"
|
||||
_tableWhite = "whitelist"
|
||||
blackList = []
|
||||
whiteList = []
|
||||
blackDict = {}
|
||||
whiteDict = {}
|
||||
|
||||
last_black_valid_result = None
|
||||
last_white_valid_result = None
|
||||
|
||||
def __init__(self, show_id):
|
||||
if not show_id:
|
||||
raise BlackWhitelistNoShowIDException()
|
||||
self.show_id = show_id
|
||||
|
||||
self.myDB = db.DBConnection()
|
||||
self.refresh()
|
||||
|
||||
def refresh(self):
|
||||
logger.log(u"Building black and white list for " + str(self.show_id), logger.DEBUG)
|
||||
|
||||
(self.blackList, self.blackDict) = self.load_blacklist()
|
||||
(self.whiteList, self.whiteDict) = self.load_whitelist()
|
||||
|
||||
def load_blacklist(self):
|
||||
return self._load_list(self._tableBlack)
|
||||
|
||||
def load_whitelist(self):
|
||||
return self._load_list(self._tableWhite)
|
||||
|
||||
def get_black_keywords_for(self, range):
|
||||
if range in self.blackDict:
|
||||
return self.blackDict[range]
|
||||
else:
|
||||
return []
|
||||
|
||||
def get_white_keywords_for(self, range):
|
||||
if range in self.whiteDict:
|
||||
return self.whiteDict[range]
|
||||
else:
|
||||
return []
|
||||
|
||||
def set_black_keywords(self, range, values):
|
||||
self._del_all_black_keywords()
|
||||
self._add_keywords(self._tableBlack, range, values)
|
||||
|
||||
def set_white_keywords(self, range, values):
|
||||
self._del_all_white_keywords()
|
||||
self._add_keywords(self._tableWhite, range, values)
|
||||
|
||||
def set_black_keywords_for(self, range, values):
|
||||
self._del_all_black_keywords_for(range)
|
||||
self._add_keywords(self._tableBlack, range, values)
|
||||
|
||||
def set_white_keywords_for(self, range, values):
|
||||
self._del_all_white_keywords_for(range)
|
||||
self._add_keywords(self._tableWhite, range, values)
|
||||
|
||||
def add_black_keyword(self, range, value):
|
||||
self._add_keywords(self._tableBlack, range, [value])
|
||||
|
||||
def add_white_keyword(self, range, value):
|
||||
self._add_keywords(self._tableWhite, range, [value])
|
||||
|
||||
def get_last_result_msg(self):
|
||||
blackResult = whiteResult = "Untested"
|
||||
if self.last_black_valid_result == True:
|
||||
blackResult = "Valid"
|
||||
elif self.last_black_valid_result == False:
|
||||
blackResult = "Invalid"
|
||||
|
||||
if self.last_white_valid_result == True:
|
||||
whiteResult = "Valid"
|
||||
elif self.last_white_valid_result == False:
|
||||
whiteResult = "Invalid"
|
||||
|
||||
return "Blacklist: " + blackResult + ", Whitelist: " + whiteResult
|
||||
|
||||
def _add_keywords(self, table, range, values):
|
||||
for value in values:
|
||||
self.myDB.action("INSERT INTO " + table + " (show_id, range , keyword) VALUES (?,?,?)", [self.show_id, range, value])
|
||||
self.refresh()
|
||||
|
||||
def _del_all_black_keywords(self):
|
||||
self._del_all_keywords(self._tableBlack)
|
||||
|
||||
def _del_all_white_keywords(self):
|
||||
self._del_all_keywords(self._tableWhite)
|
||||
|
||||
def _del_all_black_keywords_for(self, range):
|
||||
self._del_all_keywords_for(self._tableBlack, range)
|
||||
|
||||
def _del_all_white_keywords_for(self, range):
|
||||
self._del_all_keywords_for(self._tableWhite, range)
|
||||
|
||||
def _del_all_keywords(self, table):
|
||||
logger.log(u"Deleting all " + table + " keywords for " + str(self.show_id), logger.DEBUG)
|
||||
self.myDB.action("DELETE FROM " + table + " WHERE show_id = ?", [self.show_id])
|
||||
self.refresh()
|
||||
|
||||
def _del_all_keywords_for(self, table, range):
|
||||
logger.log(u"Deleting all " + range + " " + table + " keywords for " + str(self.show_id), logger.DEBUG)
|
||||
self.myDB.action("DELETE FROM " + table + " WHERE show_id = ? and range = ?", [self.show_id, range])
|
||||
self.refresh()
|
||||
|
||||
def _load_list(self, table):
|
||||
sqlResults = self.myDB.select("SELECT range,keyword FROM " + table + " WHERE show_id = ? ", [self.show_id])
|
||||
if not sqlResults or not len(sqlResults):
|
||||
return ([], {})
|
||||
|
||||
list, dict = self._build_keyword_dict(sqlResults)
|
||||
logger.log("BWL: " + str(self.show_id) + " loaded keywords from " + table + ": " + str(dict), logger.DEBUG)
|
||||
return list, dict
|
||||
|
||||
def _build_keyword_dict(self, sql_result):
|
||||
list = []
|
||||
dict = {}
|
||||
for row in sql_result:
|
||||
list.append(row["keyword"])
|
||||
if row["range"] in dict:
|
||||
dict[row["range"]].append(row["keyword"])
|
||||
else:
|
||||
dict[row["range"]] = [row["keyword"]]
|
||||
|
||||
return (list, dict)
|
||||
|
||||
def is_valid_for_black(self, haystack):
|
||||
logger.log(u"BWL: " + str(self.show_id) + " is valid black", logger.DEBUG)
|
||||
result = self._is_valid_for(self.blackDict, False, haystack)
|
||||
self.last_black_valid_result = result
|
||||
return result
|
||||
|
||||
def is_valid_for_white(self, haystack):
|
||||
logger.log(u"BWL: " + str(self.show_id) + " is valid white", logger.DEBUG)
|
||||
result = self._is_valid_for(self.whiteDict, True, haystack)
|
||||
self.last_white_valid_result = result
|
||||
return result
|
||||
|
||||
def is_valid(self, haystack):
|
||||
return self.is_valid_for_black(haystack) and self.is_valid_for_white(haystack)
|
||||
|
||||
def _is_valid_for(self, list, mood, haystack):
|
||||
if not len(list):
|
||||
return True
|
||||
|
||||
results = []
|
||||
for range in list:
|
||||
for keyword in list[range]:
|
||||
string = None
|
||||
if range == "global":
|
||||
string = haystack.name
|
||||
elif range in haystack.__dict__:
|
||||
string = haystack.__dict__[range]
|
||||
elif not range in haystack.__dict__:
|
||||
results.append((not mood))
|
||||
else:
|
||||
results.append(False)
|
||||
|
||||
if string:
|
||||
results.append(self._is_keyword_in_string(string, keyword) == mood)
|
||||
|
||||
# black: mood = False
|
||||
# white: mood = True
|
||||
if mood in results:
|
||||
return mood
|
||||
else:
|
||||
return (not mood)
|
||||
|
||||
def _is_keyword_in_string(self, fromPost, fromBWList):
|
||||
"""
|
||||
will return True if fromBWList is found in fromPost
|
||||
for now a basic find is used
|
||||
"""
|
||||
fromPost = fromPost.lower()
|
||||
fromBWList = fromBWList.lower()
|
||||
logger.log(u"BWL: " + str(self.show_id) + " comparing fromPost: " + fromPost + " vs fromBWlist: " + fromBWList, logger.DEBUG)
|
||||
return (fromPost.find(fromBWList) >= 0)
|
||||
|
||||
class BlackWhiteKeyword(object):
|
||||
range = ""
|
||||
value = []
|
||||
|
||||
def __init__(self, range, values):
|
||||
self.range = range # "global" or a parser group
|
||||
self.value = values # a list of values may contain only one item (still a list)
|
||||
|
||||
|
||||
class BlackWhitelistNoShowIDException(Exception):
|
||||
"No show_id was given"
|
|
@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
|
|||
from sickbeard.name_parser.parser import NameParser, InvalidNameException
|
||||
|
||||
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
||||
MAX_DB_VERSION = 34
|
||||
MAX_DB_VERSION = 36
|
||||
|
||||
class MainSanityCheck(db.DBSanityCheck):
|
||||
def check(self):
|
||||
|
@ -834,3 +834,30 @@ class AddSceneAbsoluteNumbering(AddAbsoluteNumbering):
|
|||
self.addColumn("scene_numbering", "scene_absolute_number", "NUMERIC", "0")
|
||||
|
||||
self.incDBVersion()
|
||||
|
||||
class AddAnimeBlacklistWhitelist(AddSceneAbsoluteNumbering):
|
||||
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 35
|
||||
|
||||
def execute(self):
|
||||
backupDatabase(35)
|
||||
|
||||
ql = []
|
||||
ql.append(["CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT)"])
|
||||
ql.append(["CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT)"])
|
||||
self.connection.mass_action(ql)
|
||||
|
||||
self.incDBVersion()
|
||||
|
||||
class AddSceneAbsoluteNumbering(AddAnimeBlacklistWhitelist):
|
||||
def test(self):
|
||||
return self.checkDBVersion() >= 36
|
||||
|
||||
def execute(self):
|
||||
backupDatabase(36)
|
||||
|
||||
logger.log(u"Adding column scene_absolute_number to tv_episodes")
|
||||
self.addColumn("tv_episodes", "scene_absolute_number", "NUMERIC", "0")
|
||||
|
||||
self.incDBVersion()
|
||||
|
|
|
@ -680,7 +680,6 @@ def is_anime_in_show_list():
|
|||
def update_anime_support():
|
||||
sickbeard.ANIMESUPPORT = is_anime_in_show_list()
|
||||
|
||||
|
||||
def get_all_episodes_from_absolute_number(show, indexer_id, absolute_numbers):
|
||||
if len(absolute_numbers) == 0:
|
||||
raise EpisodeNotFoundByAbsoluteNumberException()
|
||||
|
|
|
@ -24,7 +24,7 @@ import regexes
|
|||
import time
|
||||
import sickbeard
|
||||
|
||||
from sickbeard import logger, helpers, scene_numbering
|
||||
from sickbeard import logger, helpers, scene_numbering, db
|
||||
from sickbeard.exceptions import EpisodeNotFoundByAbsoluteNumberException
|
||||
from dateutil import parser
|
||||
|
||||
|
@ -53,6 +53,7 @@ class NameParser(object):
|
|||
self._compile_regexes(self.regexMode)
|
||||
self.showList = sickbeard.showList
|
||||
self.useIndexers = useIndexers
|
||||
self.show = show
|
||||
|
||||
def clean_series_name(self, series_name):
|
||||
"""Cleans up series name by removing any . and _
|
||||
|
@ -194,7 +195,12 @@ class NameParser(object):
|
|||
if 'release_group' in named_groups:
|
||||
result.release_group = match.group('release_group')
|
||||
|
||||
show = helpers.get_show_by_name(result.series_name, useIndexer=self.useIndexers)
|
||||
# determine the show object for correct regex matching
|
||||
if not self.show:
|
||||
show = helpers.get_show_by_name(result.series_name, useIndexer=self.useIndexers)
|
||||
else:
|
||||
show = self.show
|
||||
|
||||
if show and show.is_anime and cur_regex_type in ['anime', 'normal']:
|
||||
result.show = show
|
||||
return result
|
||||
|
@ -336,6 +342,140 @@ class NameParser(object):
|
|||
return final_result
|
||||
|
||||
|
||||
def scene2indexer(self, show, scene_name, season, episodes, absolute_numbers):
|
||||
if not show: return self # need show object
|
||||
|
||||
# TODO: check if adb and make scene2indexer useable with correct numbers
|
||||
out_season = None
|
||||
out_episodes = []
|
||||
out_absolute_numbers = []
|
||||
|
||||
# is the scene name a special season ?
|
||||
# TODO: decide whether we get scene seasons or indexer seasons; for now they are mostly the same, so they are treated as scene seasons
|
||||
_possible_seasons = sickbeard.scene_exceptions.get_scene_exception_by_name_multiple(scene_name)
|
||||
# filter possible_seasons
|
||||
possible_seasons = []
|
||||
for cur_scene_indexer_id, cur_scene_season in _possible_seasons:
|
||||
if cur_scene_indexer_id and str(cur_scene_indexer_id) != str(show.indexerid):
|
||||
logger.log("Indexer ID mismatch: " + str(show.indexerid) + " now: " + str(cur_scene_indexer_id),
|
||||
logger.ERROR)
|
||||
raise MultipleSceneShowResults("indexerid mismatch")
|
||||
# don't add season -1 since this is a generic name and not a real season... or if we get None
|
||||
# if this was the only result, possible_seasons stays empty and the code below falls back to the generic lookup
|
||||
if cur_scene_season == -1 or cur_scene_season == None:
|
||||
continue
|
||||
possible_seasons.append(cur_scene_season)
|
||||
# if not possible_seasons: # no special season name was used or we could not find it
|
||||
logger.log(
|
||||
"possible seasons for '" + scene_name + "' (" + str(show.indexerid) + ") are " + str(possible_seasons),
|
||||
logger.DEBUG)
|
||||
|
||||
# lets just get a db connection we will need it anyway
|
||||
cacheDB = db.DBConnection('cache.db')
|
||||
# should we use absolute_numbers -> anime or season, episodes -> normal show
|
||||
if show.is_anime:
|
||||
logger.log(
|
||||
u"'" + show.name + "' is an anime i will scene convert the absolute numbers " + str(absolute_numbers),
|
||||
logger.DEBUG)
|
||||
if possible_seasons:
|
||||
# check if we have a scene_absolute_number in the possible seasons
|
||||
for cur_possible_season in possible_seasons:
|
||||
# and for all absolute numbers
|
||||
for cur_ab_number in absolute_numbers:
|
||||
namesSQlResult = cacheDB.select(
|
||||
"SELECT season, episode, absolute_number FROM xem_numbering WHERE indexer_id = ? and scene_season = ? and scene_absolute_number = ?",
|
||||
[show.indexerid, cur_possible_season, cur_ab_number])
|
||||
if len(namesSQlResult) > 1:
|
||||
logger.log(
|
||||
"Multiple episodes for a absolute number and season. check XEM numbering",
|
||||
logger.ERROR)
|
||||
raise MultipleSceneEpisodeResults("Multiple episodes for a absolute number and season")
|
||||
elif len(namesSQlResult) == 0:
|
||||
break # break out of current absolute_numbers -> next season ... this is not a good sign
|
||||
# if we are here we found ONE episode for this season absolute number
|
||||
# logger.log(u"I found matching episode: " + namesSQlResult[0]['name'], logger.DEBUG)
|
||||
out_episodes.append(int(namesSQlResult[0]['episode']))
|
||||
out_absolute_numbers.append(int(namesSQlResult[0]['absolute_number']))
|
||||
out_season = int(namesSQlResult[0][
|
||||
'season']) # note this will always use the last season we got ... this will be a problem on double episodes that break the season barrier
|
||||
if out_season: # if we found an episode in the cur_possible_season we don't need to look at the other season possibilities
|
||||
break
|
||||
else: # no possible seasons from the scene names, let's look this up more generically
|
||||
for cur_ab_number in absolute_numbers:
|
||||
namesSQlResult = cacheDB.select(
|
||||
"SELECT season, episode, absolute_number FROM xem_numbering WHERE indexer_id = ? and scene_absolute_number = ?",
|
||||
[show.indexerid, cur_ab_number])
|
||||
if len(namesSQlResult) > 1:
|
||||
logger.log(
|
||||
"Multiple episodes for a absolute number. this might happend because we are missing a scene name for this season. xem lacking behind ?",
|
||||
logger.ERROR)
|
||||
raise MultipleSceneEpisodeResults("Multiple episodes for a absolute number")
|
||||
elif len(namesSQlResult) == 0:
|
||||
continue
|
||||
# if we are here we found ONE episode for this season absolute number
|
||||
# logger.log(u"I found matching episode: " + namesSQlResult[0]['name'], logger.DEBUG)
|
||||
out_episodes.append(int(namesSQlResult[0]['episode']))
|
||||
out_absolute_numbers.append(int(namesSQlResult[0]['absolute_number']))
|
||||
out_season = int(namesSQlResult[0][
|
||||
'season']) # note this will always use the last season we got ... this will be a problem on double episodes that break the season barrier
|
||||
if not out_season: # we did not find anything in the loops, there is no episode
|
||||
logger.log("No episode found for these scene numbers. asuming indexer numbers", logger.DEBUG)
|
||||
# we still have to convert the absolute number to SxxExx ... but that is not done here
|
||||
else:
|
||||
logger.log(u"'" + show.name + "' is a normal show i will scene convert the season and episodes " + str(
|
||||
season) + "x" + str(episodes), logger.DEBUG)
|
||||
out_absolute_numbers = None
|
||||
if possible_seasons:
|
||||
# check if we have a scene_absolute_number in the possible seasons
|
||||
for cur_possible_season in possible_seasons:
|
||||
# and for all episode
|
||||
for cur_episode in episodes:
|
||||
namesSQlResult = cacheDB.select(
|
||||
"SELECT season, episode FROM xem_numbering WHERE indexer_id = ? and scene_season = ? and scene_episode = ?",
|
||||
[show.indexerid, cur_possible_season, cur_episode])
|
||||
if len(namesSQlResult) > 1:
|
||||
logger.log(
|
||||
"Multiple episodes for season episode number combination. this should not be check xem configuration",
|
||||
logger.ERROR)
|
||||
raise MultipleSceneEpisodeResults("Multiple episodes for season episode number combination")
|
||||
elif len(namesSQlResult) == 0:
|
||||
break # break out of current episode -> next season ... this is not a good sign
|
||||
# if we are here we found ONE episode for this season absolute number
|
||||
# logger.log(u"I found matching episode: " + namesSQlResult[0]['name'], logger.DEBUG)
|
||||
out_episodes.append(int(namesSQlResult[0]['episode']))
|
||||
out_season = int(namesSQlResult[0][
|
||||
'season']) # note this will always use the last season we got ... this will be a problem on double episodes that break the season barrier
|
||||
if out_season: # if we found an episode in the cur_possible_season we don't need to look at the other possibilities
|
||||
break
|
||||
else: # no possible seasons from the scene names, let's look this up more generically
|
||||
for cur_episode in episodes:
|
||||
namesSQlResult = cacheDB.select(
|
||||
"SELECT season, episode FROM xem_numbering WHERE indexer_id = ? and scene_episode = ? and scene_season = ?",
|
||||
[show.indexerid, cur_episode, season])
|
||||
if len(namesSQlResult) > 1:
|
||||
logger.log(
|
||||
"Multiple episodes for season episode number combination. this might happend because we are missing a scene name for this season. xem lacking behind ?",
|
||||
logger.ERROR)
|
||||
raise MultipleSceneEpisodeResults("Multiple episodes for season episode number combination")
|
||||
elif len(namesSQlResult) == 0:
|
||||
continue
|
||||
# if we are here we found ONE episode for this season absolute number
|
||||
# logger.log(u"I found matching episode: " + namesSQlResult[0]['name'], logger.DEBUG)
|
||||
out_episodes.append(int(namesSQlResult[0]['episode']))
|
||||
out_season = int(namesSQlResult[0][
|
||||
'season']) # note this will always use the last season we got ... this will be a problem on double episodes that break the season barrier
|
||||
# this is only done for normal shows
|
||||
if not out_season: # we did not find anything in the loops, there is no episode
|
||||
logger.log("No episode found for these scene numbers. assuming these are valid indexer numbers",
|
||||
logger.DEBUG)
|
||||
out_season = season
|
||||
out_episodes = episodes
|
||||
out_absolute_numbers = absolute_numbers
|
||||
|
||||
# okay that was easy we found the correct season and episode numbers
|
||||
return (out_season, out_episodes, out_absolute_numbers)
|
||||
|
||||
|
||||
class ParseResult(object):
|
||||
def __init__(self,
|
||||
original_name,
|
||||
|
@ -454,7 +594,8 @@ class ParseResult(object):
|
|||
if len(self.ab_episode_numbers):
|
||||
abNo = self.ab_episode_numbers[i]
|
||||
|
||||
(s, e, a) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, self.season_number,
|
||||
(s, e, a) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer,
|
||||
self.season_number,
|
||||
epNo, abNo)
|
||||
new_episode_numbers.append(e)
|
||||
new_season_numbers.append(s)
|
||||
|
@ -530,3 +671,11 @@ name_parser_cache = NameParserCache()
|
|||
|
||||
class InvalidNameException(Exception):
|
||||
"The given name is not valid"
|
||||
|
||||
|
||||
class MultipleSceneShowResults(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class MultipleSceneEpisodeResults(Exception):
|
||||
pass
|
||||
|
|
|
@ -39,6 +39,7 @@ class BTNProvider(generic.TorrentProvider):
|
|||
generic.TorrentProvider.__init__(self, "BTN")
|
||||
|
||||
self.supportsBacklog = True
|
||||
self.supportsAbsoluteNumbering = True
|
||||
|
||||
self.enabled = False
|
||||
self.api_key = None
|
||||
|
@ -211,13 +212,14 @@ class BTNProvider(generic.TorrentProvider):
|
|||
|
||||
# Search for entire seasons: no need to do special things for air by date shows
|
||||
whole_season_params = current_params.copy()
|
||||
partial_season_params = current_params.copy()
|
||||
|
||||
# Search for entire seasons: no need to do special things for air by date shows
|
||||
whole_season_params['category'] = 'Season'
|
||||
if ep_obj.show.air_by_date or ep_obj.show.sports:
|
||||
# Search for the year of the air by date show
|
||||
whole_season_params['name'] = str(ep_obj.airdate).split('-')[0]
|
||||
elif ep_obj.show.is_anime:
|
||||
whole_season_params['name'] = "%d" % ep_obj.scene_absolute_number
|
||||
else:
|
||||
whole_season_params['name'] = 'Season ' + str(ep_obj.scene_season)
|
||||
|
||||
|
@ -232,9 +234,9 @@ class BTNProvider(generic.TorrentProvider):
|
|||
|
||||
search_params = {'category': 'Episode'}
|
||||
|
||||
if self.show.indexer == 1:
|
||||
if self.show.indexer == 1 and not self.show.is_anime:
|
||||
search_params['tvdb'] = self.show.indexerid
|
||||
elif self.show.indexer == 2:
|
||||
elif self.show.indexer == 2 and not self.show.is_anime:
|
||||
search_params['tvrage'] = self.show.indexerid
|
||||
else:
|
||||
search_params['series'] = sanitizeSceneName(self.show.name)
|
||||
|
@ -251,6 +253,8 @@ class BTNProvider(generic.TorrentProvider):
|
|||
# BTN uses dots in dates, we just search for the date since that
|
||||
# combined with the series identifier should result in just one episode
|
||||
search_params['name'] = date_str.replace('-', '.')
|
||||
elif self.show.is_anime:
|
||||
search_params['name'] = "%i" % int(ep_obj.scene_absolute_number)
|
||||
else:
|
||||
# Do a general name search for the episode, formatted like SXXEYY
|
||||
search_params['name'] = "S%02dE%02d" % (ep_obj.scene_season, ep_obj.scene_episode)
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import urllib
|
||||
import datetime
|
||||
|
||||
import sickbeard
|
||||
import generic
|
||||
|
@ -36,6 +37,7 @@ class Fanzub(generic.NZBProvider):
|
|||
|
||||
self.supportsBacklog = False
|
||||
self.supportsAbsoluteNumbering = True
|
||||
self.anime_only = True
|
||||
|
||||
self.enabled = False
|
||||
|
||||
|
@ -101,24 +103,21 @@ class Fanzub(generic.NZBProvider):
|
|||
results = []
|
||||
|
||||
for i in [2, 3, 4]: # we will look for a version 2, 3 and 4
|
||||
"""
|
||||
because of this the proper search failed !!
|
||||
well, more precisely because _doSearch accepts a string rather than a dict
|
||||
params = {
|
||||
"q":"v"+str(i).encode('utf-8')
|
||||
}
|
||||
"""
|
||||
for curResult in self._doSearch("v" + str(i)):
|
||||
for item in self._doSearch("v" + str(i)):
|
||||
|
||||
match = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', curResult.findtext('pubDate'))
|
||||
if not match:
|
||||
(title, url) = self._get_title_and_url(item)
|
||||
|
||||
if item.has_key('published_parsed') and item['published_parsed']:
|
||||
result_date = item.published_parsed
|
||||
if result_date:
|
||||
result_date = datetime.datetime(*result_date[0:6])
|
||||
else:
|
||||
logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
|
||||
continue
|
||||
|
||||
dateString = match.group(1)
|
||||
resultDate = parseDate(dateString).replace(tzinfo=None)
|
||||
|
||||
if date == None or resultDate > date:
|
||||
results.append(classes.Proper(curResult.findtext('title'), curResult.findtext('link'), resultDate))
|
||||
if not date or result_date > date:
|
||||
search_result = classes.Proper(title, url, result_date)
|
||||
results.append(search_result)
|
||||
|
||||
return results
|
||||
|
||||
|
@ -145,7 +144,7 @@ class FanzubCache(tvcache.TVCache):
|
|||
|
||||
return self.getRSSFeed(rss_url)
|
||||
|
||||
def _checkAuth(self, data):
|
||||
return self.provider._checkAuthFromData(data)
|
||||
def _checkItemAuth(self, title, url):
|
||||
return True
|
||||
|
||||
provider = Fanzub()
|
||||
|
|
|
@ -55,11 +55,12 @@ class GenericProvider:
|
|||
|
||||
self.supportsBacklog = False
|
||||
self.supportsAbsoluteNumbering = False
|
||||
self.anime_only = False
|
||||
|
||||
self.search_mode = None
|
||||
self.search_fallback = False
|
||||
self.backlog_only = False
|
||||
|
||||
|
||||
self.cache = tvcache.TVCache(self)
|
||||
|
||||
self.session = requests.session()
|
||||
|
@ -254,7 +255,7 @@ class GenericProvider:
|
|||
u"Incomplete Indexer <-> Scene mapping detected for " + epObj.prettyName() + ", skipping search!")
|
||||
continue
|
||||
|
||||
#cacheResult = self.cache.searchCache([epObj], manualSearch)
|
||||
# cacheResult = self.cache.searchCache([epObj], manualSearch)
|
||||
#if len(cacheResult):
|
||||
# results.update({epObj.episode:cacheResult[epObj]})
|
||||
# continue
|
||||
|
@ -275,7 +276,7 @@ class GenericProvider:
|
|||
searchItems[epObj] = itemList
|
||||
|
||||
# if we have cached results return them.
|
||||
#if len(results):
|
||||
# if len(results):
|
||||
# return results
|
||||
|
||||
for ep_obj in searchItems:
|
||||
|
@ -323,7 +324,7 @@ class GenericProvider:
|
|||
continue
|
||||
|
||||
if (parse_result.air_by_date and parse_result.air_date != ep_obj.airdate) or (
|
||||
parse_result.sports and parse_result.sports_event_date != ep_obj.airdate):
|
||||
parse_result.sports and parse_result.sports_event_date != ep_obj.airdate):
|
||||
logger.log("Episode " + title + " didn't air on " + str(ep_obj.airdate) + ", skipping it",
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
|
|
@ -37,7 +37,7 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
|
||||
self.supportsBacklog = True
|
||||
self.supportsAbsoluteNumbering = True
|
||||
|
||||
self.anime_only = True
|
||||
self.enabled = False
|
||||
self.ratio = None
|
||||
|
||||
|
@ -60,9 +60,7 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
return generic.TorrentProvider.findSearchResults(self, show, season, episodes, search_mode, manualSearch)
|
||||
|
||||
def _get_season_search_strings(self, ep_obj):
|
||||
names = []
|
||||
names.extend(show_name_helpers.makeSceneShowSearchStrings(self.show))
|
||||
return names
|
||||
return show_name_helpers.makeSceneShowSearchStrings(self.show)
|
||||
|
||||
def _get_episode_search_strings(self, ep_obj, add_string=''):
|
||||
return self._get_season_search_strings(ep_obj)
|
||||
|
|
|
@ -68,7 +68,6 @@ def get_scene_numbering(indexer_id, indexer, season, episode, absolute_number=No
|
|||
return xem_result
|
||||
return (season, episode, absolute_number)
|
||||
|
||||
|
||||
def find_scene_numbering(indexer_id, indexer, season, episode, absolute_number=None):
|
||||
"""
|
||||
Same as get_scene_numbering(), but returns None if scene numbering is not set
|
||||
|
@ -400,7 +399,6 @@ def get_xem_numbering_for_season(indexer_id, indexer, season):
|
|||
|
||||
return result
|
||||
|
||||
|
||||
def fix_scene_numbering():
|
||||
ql = []
|
||||
|
||||
|
@ -436,3 +434,40 @@ def fix_scene_numbering():
|
|||
|
||||
if ql:
|
||||
myDB.mass_action(ql)
|
||||
|
||||
def get_ep_mapping(epObj, parse_result):
|
||||
# scores
|
||||
indexer_numbering = 0
|
||||
scene_numbering = 0
|
||||
absolute_numbering = 0
|
||||
|
||||
_possible_seasons = sickbeard.scene_exceptions.get_scene_exception_by_name_multiple(parse_result.series_name)
|
||||
|
||||
# indexer numbering
|
||||
if epObj.season == parse_result.season_number:
|
||||
indexer_numbering += 1
|
||||
elif epObj.episode in parse_result.episode_numbers:
|
||||
indexer_numbering += 1
|
||||
|
||||
# scene numbering
|
||||
if epObj.scene_season == parse_result.season_number:
|
||||
scene_numbering += 1
|
||||
elif epObj.scene_episode in parse_result.episode_numbers:
|
||||
scene_numbering += 1
|
||||
|
||||
# absolute numbering
|
||||
if epObj.show.is_anime and parse_result.is_anime:
|
||||
|
||||
if epObj.absolute_number in parse_result.ab_episode_numbers:
|
||||
absolute_numbering +=1
|
||||
elif epObj.scene_absolute_number in parse_result.ab_episode_numbers:
|
||||
absolute_numbering += 1
|
||||
|
||||
if indexer_numbering == 2:
|
||||
print "indexer numbering"
|
||||
elif scene_numbering == 2:
|
||||
print "scene numbering"
|
||||
elif absolute_numbering == 1:
|
||||
print "indexer numbering"
|
||||
else:
|
||||
print "could not determin numbering"
|
|
@ -42,7 +42,7 @@ from sickbeard import providers
|
|||
from sickbeard import failed_history
|
||||
from sickbeard.exceptions import ex
|
||||
from sickbeard.providers.generic import GenericProvider, tvcache
|
||||
|
||||
from sickbeard.blackandwhitelist import BlackAndWhiteList
|
||||
|
||||
def _downloadResult(result):
|
||||
"""
|
||||
|
@ -197,11 +197,23 @@ def filter_release_name(name, filter_words):
|
|||
def pickBestResult(results, show, quality_list=None):
|
||||
logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)
|
||||
|
||||
# build the black And white list
|
||||
bwl = None
|
||||
if show:
|
||||
bwl = BlackAndWhiteList(show.indexerid)
|
||||
else:
|
||||
logger.log("Could not create black and white list no show was given", logger.DEBUG)
|
||||
|
||||
# find the best result for the current episode
|
||||
bestResult = None
|
||||
for cur_result in results:
|
||||
logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])
|
||||
|
||||
if bwl:
|
||||
if not bwl.is_valid(cur_result):
|
||||
logger.log(cur_result.name+" does not match the blacklist or the whitelist, rejecting it. Result: " + bwl.get_last_result_msg(), logger.MESSAGE)
|
||||
continue
|
||||
|
||||
if quality_list and cur_result.quality not in quality_list:
|
||||
logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
|
||||
continue
|
||||
|
@ -254,12 +266,18 @@ def isFinalResult(result):
|
|||
|
||||
show_obj = result.episodes[0].show
|
||||
|
||||
bwl = BlackAndWhiteList(show_obj.indexerid)
|
||||
|
||||
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
|
||||
|
||||
# if there is a redownload that's higher than this then we definitely need to keep looking
|
||||
if best_qualities and result.quality < max(best_qualities):
|
||||
return False
|
||||
|
||||
# if it does not match the shows black and white list its no good
|
||||
elif not bwl.is_valid(result):
|
||||
return False
|
||||
|
||||
# if there's no redownload that's higher (above) and this is the highest initial download then we're good
|
||||
elif any_qualities and result.quality in any_qualities:
|
||||
return True
|
||||
|
@ -317,7 +335,7 @@ def filterSearchResults(show, season, results):
|
|||
return foundResults
|
||||
|
||||
|
||||
def searchForNeededEpisodes(episodes):
|
||||
def searchForNeededEpisodes(show, episodes):
|
||||
foundResults = {}
|
||||
|
||||
didSearch = False
|
||||
|
@ -328,6 +346,10 @@ def searchForNeededEpisodes(episodes):
|
|||
for curProviderCount, curProvider in enumerate(providers):
|
||||
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
|
||||
|
||||
if curProvider.anime_only and not show.is_anime:
|
||||
logger.log(u"" + str(show.name) + " is not an anime skiping ...")
|
||||
continue
|
||||
|
||||
try:
|
||||
logger.log(u"Updating RSS cache ...")
|
||||
curProvider.cache.updateCache()
|
||||
|
@ -382,9 +404,10 @@ def searchProviders(show, season, episodes, manualSearch=False):
|
|||
|
||||
# check if we want to search for season packs instead of just season/episode
|
||||
seasonSearch = False
|
||||
seasonEps = show.getAllEpisodes(season)
|
||||
if len(seasonEps) == len(episodes):
|
||||
seasonSearch = True
|
||||
if not manualSearch:
|
||||
seasonEps = show.getAllEpisodes(season)
|
||||
if len(seasonEps) == len(episodes):
|
||||
seasonSearch = True
|
||||
|
||||
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
|
||||
|
||||
|
@ -399,6 +422,10 @@ def searchProviders(show, season, episodes, manualSearch=False):
|
|||
foundResults.setdefault(provider.name, {})
|
||||
searchCount = 0
|
||||
|
||||
if provider.anime_only and not show.is_anime:
|
||||
logger.log(u"" + str(show.name) + " is not an anime skiping ...")
|
||||
continue
|
||||
|
||||
search_mode = 'eponly'
|
||||
if seasonSearch and provider.search_mode == 'sponly':
|
||||
search_mode = provider.search_mode
|
||||
|
|
|
@ -97,7 +97,7 @@ class DailySearchQueueItem(generic_queue.QueueItem):
|
|||
generic_queue.QueueItem.execute(self)
|
||||
|
||||
logger.log("Beginning daily search for [" + self.show.name + "]")
|
||||
foundResults = search.searchForNeededEpisodes(self.segment)
|
||||
foundResults = search.searchForNeededEpisodes(self.show, self.segment)
|
||||
|
||||
# reset thread back to original name
|
||||
threading.currentThread().name = self.thread_name
|
||||
|
|
|
@ -351,6 +351,16 @@ class QueueItemAdd(ShowQueueItem):
|
|||
logger.ERROR)
|
||||
logger.log(traceback.format_exc(), logger.DEBUG)
|
||||
|
||||
# before we parse local files, let's update the exceptions
|
||||
sickbeard.scene_exceptions.retrieve_exceptions()
|
||||
|
||||
# and get scene numbers
|
||||
logger.log(u"Attempting to load scene numbers", logger.DEBUG)
|
||||
if self.show.loadEpisodeSceneNumbers():
|
||||
logger.log(u"loading scene numbers successfull", logger.DEBUG)
|
||||
else:
|
||||
logger.log(u"loading scene numbers NOT successfull or no scene numbers available", logger.DEBUG)
|
||||
|
||||
try:
|
||||
self.show.loadEpisodesFromDir()
|
||||
except Exception, e:
|
||||
|
@ -538,8 +548,13 @@ class QueueItemUpdate(ShowQueueItem):
|
|||
except exceptions.EpisodeDeletedException:
|
||||
pass
|
||||
|
||||
sickbeard.showQueueScheduler.action.refreshShow(self.show, True) #@UndefinedVariable
|
||||
logger.log(u"Attempting to load scene numbers", logger.DEBUG)
|
||||
if self.show.loadEpisodeSceneNumbers():
|
||||
logger.log(u"loading scene numbers successfull", logger.DEBUG)
|
||||
else:
|
||||
logger.log(u"loading scene numbers NOT successfull or no scene numbers available", logger.DEBUG)
|
||||
|
||||
sickbeard.showQueueScheduler.action.refreshShow(self.show, True)
|
||||
|
||||
class QueueItemForceUpdate(QueueItemUpdate):
|
||||
def __init__(self, show=None):
|
||||
|
|
186
sickbeard/tv.py
|
@ -52,6 +52,7 @@ from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVE
|
|||
from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, \
|
||||
NAMING_LIMITED_EXTEND_E_PREFIXED
|
||||
|
||||
|
||||
class TVShow(object):
|
||||
def __init__(self, indexer, indexerid, lang=""):
|
||||
|
||||
|
@ -96,17 +97,19 @@ class TVShow(object):
|
|||
self.loadFromDB()
|
||||
|
||||
def _is_anime(self):
|
||||
if(self.anime > 0):
|
||||
if (self.anime > 0):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
is_anime = property(_is_anime)
|
||||
|
||||
def _is_sports(self):
|
||||
if(self.sports > 0):
|
||||
if (self.sports > 0):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
is_sports = property(_is_sports)
|
||||
|
||||
def _getLocation(self):
|
||||
|
@ -197,14 +200,25 @@ class TVShow(object):
|
|||
if len(sqlResults) == 1:
|
||||
episode = int(sqlResults[0]["episode"])
|
||||
season = int(sqlResults[0]["season"])
|
||||
logger.log("Found episode by absolute_number:"+str(absolute_number)+" which is "+str(season)+"x"+str(episode), logger.DEBUG)
|
||||
logger.log(
|
||||
"Found episode by absolute_number:" + str(absolute_number) + " which is " + str(season) + "x" + str(
|
||||
episode), logger.DEBUG)
|
||||
elif len(sqlResults) > 1:
|
||||
logger.log("Multiple entries for absolute number: "+str(absolute_number)+" in show: "+self.name+" found ", logger.ERROR)
|
||||
logger.log("Multiple entries for absolute number: " + str(
|
||||
absolute_number) + " in show: " + self.name + " found ", logger.ERROR)
|
||||
return None
|
||||
else:
|
||||
logger.log("No entries for absolute number: "+str(absolute_number)+" in show: "+self.name+" found.", logger.DEBUG)
|
||||
logger.log(
|
||||
"No entries for absolute number: " + str(absolute_number) + " in show: " + self.name + " found.",
|
||||
logger.DEBUG)
|
||||
return None
|
||||
|
||||
def createCurSeasonDict():
|
||||
if not season in self.episodes:
|
||||
self.episodes[season] = {}
|
||||
|
||||
createCurSeasonDict()
|
||||
|
||||
if not episode in self.episodes[season] or self.episodes[season][episode] == None:
|
||||
if noCreate:
|
||||
return None
|
||||
|
@ -221,7 +235,6 @@ class TVShow(object):
|
|||
self.episodes[season][episode] = ep
|
||||
|
||||
epObj = self.episodes[season][episode]
|
||||
epObj.convertToSceneNumbering()
|
||||
|
||||
return epObj
|
||||
|
||||
|
@ -264,7 +277,7 @@ class TVShow(object):
|
|||
# in the first year after ended (last airdate), update every 30 days
|
||||
# in the first year after ended (last airdate), update every 30 days
|
||||
if (update_date - last_airdate) < datetime.timedelta(days=450) and (
|
||||
update_date - last_update_indexer) > datetime.timedelta(days=30):
|
||||
update_date - last_update_indexer) > datetime.timedelta(days=30):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
@ -519,6 +532,33 @@ class TVShow(object):
|
|||
|
||||
return scannedEps
|
||||
|
||||
def loadEpisodeSceneNumbers(self):
|
||||
epList = self.loadEpisodesFromDB()
|
||||
|
||||
sql_l = []
|
||||
for curSeason in epList:
|
||||
for curEp in epList[curSeason]:
|
||||
epObj = self.getEpisode(curSeason, curEp)
|
||||
|
||||
with epObj.lock:
|
||||
(epObj.scene_season, epObj.scene_episode, epObj.scene_absolute_number) = \
|
||||
sickbeard.scene_numbering.get_scene_numbering(self.indexerid, self.indexer, epObj.season,
|
||||
epObj.episode, epObj.absolute_number)
|
||||
logger.log(
|
||||
str(self.indexerid) + ": adding scene numbering. Indexer: " + str(epObj.season) + "x" + str(
|
||||
epObj.episode) + "| Scene: " + str(epObj.scene_season) + "x" + str(epObj.scene_episode),
|
||||
logger.DEBUG)
|
||||
|
||||
# mass add to database
|
||||
if epObj.dirty:
|
||||
sql_l.append(epObj.get_sql())
|
||||
|
||||
if len(sql_l) > 0:
|
||||
myDB = db.DBConnection()
|
||||
myDB.mass_action(sql_l)
|
||||
|
||||
return True
|
||||
|
||||
def getImages(self, fanart=None, poster=None):
|
||||
fanart_result = poster_result = banner_result = False
|
||||
season_posters_result = season_banners_result = season_all_poster_result = season_all_banner_result = False
|
||||
|
@ -765,7 +805,7 @@ class TVShow(object):
|
|||
if not self.imdbid:
|
||||
self.imdbid = sqlResults[0]["imdb_id"]
|
||||
|
||||
#Get IMDb_info from database
|
||||
# Get IMDb_info from database
|
||||
sqlResults = myDB.select("SELECT * FROM imdb_info WHERE indexer_id = ?", [self.indexerid])
|
||||
|
||||
if len(sqlResults) == 0:
|
||||
|
@ -851,7 +891,7 @@ class TVShow(object):
|
|||
else:
|
||||
imdb_info[key] = imdbTv.get(key.replace('_', ' '))
|
||||
|
||||
#Filter only the value
|
||||
# Filter only the value
|
||||
if imdb_info['runtimes']:
|
||||
imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0)
|
||||
else:
|
||||
|
@ -862,13 +902,13 @@ class TVShow(object):
|
|||
else:
|
||||
imdb_info['akas'] = ''
|
||||
|
||||
#Join all genres in a string
|
||||
# Join all genres in a string
|
||||
if imdb_info['genres']:
|
||||
imdb_info['genres'] = '|'.join(imdb_info['genres'])
|
||||
else:
|
||||
imdb_info['genres'] = ''
|
||||
|
||||
#Get only the production country certificate if any
|
||||
# Get only the production country certificate if any
|
||||
if imdb_info['certificates'] and imdb_info['countries']:
|
||||
dct = {}
|
||||
try:
|
||||
|
@ -889,7 +929,7 @@ class TVShow(object):
|
|||
|
||||
imdb_info['last_update'] = datetime.date.today().toordinal()
|
||||
|
||||
#Rename dict keys without spaces for DB upsert
|
||||
# Rename dict keys without spaces for DB upsert
|
||||
self.imdb_info = dict(
|
||||
(k.replace(' ', '_'), k(v) if hasattr(v, 'keys') else v) for k, v in imdb_info.items())
|
||||
logger.log(str(self.indexerid) + u": Obtained info from IMDb ->" + str(self.imdb_info), logger.DEBUG)
|
||||
|
@ -980,7 +1020,8 @@ class TVShow(object):
|
|||
# if it used to have a file associated with it and it doesn't anymore then set it to IGNORED
|
||||
if curEp.location and curEp.status in Quality.DOWNLOADED:
|
||||
logger.log(str(self.indexerid) + u": Location for " + str(season) + "x" + str(
|
||||
episode) + " doesn't exist, removing it and changing our status to IGNORED", logger.DEBUG)
|
||||
episode) + " doesn't exist, removing it and changing our status to IGNORED",
|
||||
logger.DEBUG)
|
||||
curEp.status = IGNORED
|
||||
curEp.subtitles = list()
|
||||
curEp.subtitles_searchcount = 0
|
||||
|
@ -1008,19 +1049,20 @@ class TVShow(object):
|
|||
hr = (12 + hr, hr)[None is airs.group(3)]
|
||||
min = int((airs.group(2), min)[None is airs.group(2)])
|
||||
airtime = datetime.time(hr, min)
|
||||
|
||||
|
||||
airdatetime = datetime.datetime.combine(ep_obj.airdate, airtime)
|
||||
|
||||
filemtime = datetime.datetime.fromtimestamp(os.path.getmtime(ep_obj.location))
|
||||
|
||||
if filemtime != airdatetime:
|
||||
import time
|
||||
|
||||
airdatetime = airdatetime.timetuple()
|
||||
if self.touch(ep_obj.location, time.mktime(airdatetime)):
|
||||
logger.log(str(self.indexerid) + u": Changed modify date of " + os.path.basename(ep_obj.location)
|
||||
+ " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
|
||||
+ " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
|
||||
|
||||
def touch(self, fname, atime = None):
|
||||
def touch(self, fname, atime=None):
|
||||
|
||||
if None != atime:
|
||||
try:
|
||||
|
@ -1034,7 +1076,7 @@ class TVShow(object):
|
|||
return False
|
||||
|
||||
def downloadSubtitles(self, force=False):
|
||||
#TODO: Add support for force option
|
||||
# TODO: Add support for force option
|
||||
if not ek.ek(os.path.isdir, self._location):
|
||||
logger.log(str(self.indexerid) + ": Show dir doesn't exist, can't download subtitles", logger.DEBUG)
|
||||
return
|
||||
|
@ -1224,8 +1266,8 @@ class TVEpisode(object):
|
|||
self._season = season
|
||||
self._episode = episode
|
||||
self._absolute_number = 0
|
||||
self._scene_season = season
|
||||
self._scene_episode = episode
|
||||
self._scene_season = 0
|
||||
self._scene_episode = 0
|
||||
self._scene_absolute_number = 0
|
||||
self._description = ""
|
||||
self._subtitles = list()
|
||||
|
@ -1274,7 +1316,7 @@ class TVEpisode(object):
|
|||
status = property(lambda self: self._status, dirty_setter("_status"))
|
||||
indexer = property(lambda self: self._indexer, dirty_setter("_indexer"))
|
||||
indexerid = property(lambda self: self._indexerid, dirty_setter("_indexerid"))
|
||||
#location = property(lambda self: self._location, dirty_setter("_location"))
|
||||
# location = property(lambda self: self._location, dirty_setter("_location"))
|
||||
file_size = property(lambda self: self._file_size, dirty_setter("_file_size"))
|
||||
release_name = property(lambda self: self._release_name, dirty_setter("_release_name"))
|
||||
is_proper = property(lambda self: self._is_proper, dirty_setter("_is_proper"))
|
||||
|
@ -1282,7 +1324,7 @@ class TVEpisode(object):
|
|||
def _set_location(self, new_location):
|
||||
logger.log(u"Setter sets location to " + new_location, logger.DEBUG)
|
||||
|
||||
#self._location = newLocation
|
||||
# self._location = newLocation
|
||||
dirty_setter("_location")(self, new_location)
|
||||
|
||||
if new_location and ek.ek(os.path.isfile, new_location):
|
||||
|
@ -1297,7 +1339,7 @@ class TVEpisode(object):
|
|||
self.subtitles = subtitles.subtitlesLanguages(self.location)
|
||||
|
||||
def downloadSubtitles(self, force=False):
|
||||
#TODO: Add support for force option
|
||||
# TODO: Add support for force option
|
||||
if not ek.ek(os.path.isfile, self.location):
|
||||
logger.log(
|
||||
str(self.show.indexerid) + ": Episode file doesn't exist, can't download subtitles for episode " + str(
|
||||
|
@ -1337,7 +1379,7 @@ class TVEpisode(object):
|
|||
return
|
||||
|
||||
self.refreshSubtitles()
|
||||
self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1 #added the if because sometime it raise an error
|
||||
self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1  # added the if because sometimes it raises an error
|
||||
self.subtitles_lastsearch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
self.saveToDB()
|
||||
|
||||
|
@ -1434,7 +1476,7 @@ class TVEpisode(object):
|
|||
self.episode) + " not found in the database", logger.DEBUG)
|
||||
return False
|
||||
else:
|
||||
#NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"]))
|
||||
# NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"]))
|
||||
if sqlResults[0]["name"]:
|
||||
self.name = sqlResults[0]["name"]
|
||||
|
||||
|
@ -1449,7 +1491,7 @@ class TVEpisode(object):
|
|||
self.subtitles_searchcount = sqlResults[0]["subtitles_searchcount"]
|
||||
self.subtitles_lastsearch = sqlResults[0]["subtitles_lastsearch"]
|
||||
self.airdate = datetime.date.fromordinal(int(sqlResults[0]["airdate"]))
|
||||
#logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG)
|
||||
# logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG)
|
||||
self.status = int(sqlResults[0]["status"])
|
||||
|
||||
# don't overwrite my location
|
||||
|
@ -1463,12 +1505,31 @@ class TVEpisode(object):
|
|||
self.indexerid = int(sqlResults[0]["indexerid"])
|
||||
self.indexer = int(sqlResults[0]["indexer"])
|
||||
|
||||
# does anyone know a better way to test for NULL in the db field?
|
||||
if sqlResults[0]["scene_season"]:
|
||||
self.scene_season = int(sqlResults[0]["scene_season"])
|
||||
|
||||
if sqlResults[0]["scene_episode"]:
|
||||
self.scene_episode = int(sqlResults[0]["scene_episode"])
|
||||
|
||||
if sqlResults[0]["scene_absolute_number"]:
|
||||
self.scene_absolute_number = int(sqlResults[0]["scene_absolute_number"])
|
||||
|
||||
if sqlResults[0]["release_name"] is not None:
|
||||
self.release_name = sqlResults[0]["release_name"]
|
||||
|
||||
if sqlResults[0]["is_proper"]:
|
||||
self.is_proper = int(sqlResults[0]["is_proper"])
|
||||
|
||||
if self.scene_season == 0 or self.scene_episode == 0 or self.scene_absolute_number == 0:
|
||||
(self.scene_season, self.scene_episode, self.scene_absolute_number) = \
|
||||
sickbeard.scene_numbering.get_scene_numbering(
|
||||
self.show.indexerid,
|
||||
self.show.indexer,
|
||||
self.season,
|
||||
self.episode,
|
||||
self.absolute_number)
|
||||
|
||||
self.dirty = False
|
||||
return True
|
||||
|
||||
|
@ -1534,11 +1595,14 @@ class TVEpisode(object):
return False

if myEp["absolute_number"] == None or myEp["absolute_number"] == "":
- logger.log(u"This episode ("+self.show.name+" - "+str(season)+"x"+str(episode)+") has no absolute number on " + sickbeard.indexerApi(
+ logger.log(u"This episode (" + self.show.name + " - " + str(season) + "x" + str(
+ episode) + ") has no absolute number on " + sickbeard.indexerApi(
self.indexer).name
, logger.DEBUG)
else:
- logger.log(str(self.show.indexerid) + ": The absolute_number for " + str(season) + "x" + str(episode)+" is : "+myEp["absolute_number"], logger.DEBUG)
+ logger.log(
+ str(self.show.indexerid) + ": The absolute_number for " + str(season) + "x" + str(episode) + " is : " +
+ myEp["absolute_number"], logger.DEBUG)
self.absolute_number = int(myEp["absolute_number"])

self.name = getattr(myEp, 'episodename', "")

@ -1563,7 +1627,7 @@ class TVEpisode(object):
self.deleteEpisode()
return False

- #early conversion to int so that episode doesn't get marked dirty
+ # early conversion to int so that episode doesn't get marked dirty
self.indexerid = getattr(myEp, 'id', None)
if self.indexerid is None:
logger.log(u"Failed to retrieve ID from " + sickbeard.indexerApi(self.indexer).name, logger.ERROR)

@ -1571,7 +1635,7 @@ class TVEpisode(object):
self.deleteEpisode()
return False

- #don't update show status if show dir is missing, unless missing show dirs are created during post-processing
+ # don't update show status if show dir is missing, unless missing show dirs are created during post-processing
if not ek.ek(os.path.isdir, self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS:
logger.log(
u"The show dir is missing, not bothering to change the episode statuses since it'd probably be invalid")

@ -1653,7 +1717,7 @@ class TVEpisode(object):
showXML = etree.ElementTree(file=nfoFile)
except (SyntaxError, ValueError), e:
logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e),
- logger.ERROR) #TODO: figure out what's wrong and fix it
+ logger.ERROR) # TODO: figure out what's wrong and fix it
try:
ek.ek(os.rename, nfoFile, nfoFile + ".old")
except Exception, e:

@ -1777,12 +1841,13 @@ class TVEpisode(object):

# use a custom update/insert method to get the data into the DB
return [
- "INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES "
- "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
+ "INSERT OR REPLACE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, scene_season, scene_episode, absolute_number, scene_absolute_number) VALUES "
+ "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
[self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description,
",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
- self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number]]
+ self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.scene_season,
+ self.scene_episode, self.absolute_number, self.scene_absolute_number]]

def saveToDB(self, forceSave=False):
"""

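The widened INSERT OR REPLACE keeps the existing episode_id by selecting it back in a subquery, so saving an episode again updates the row instead of creating a duplicate. A cut-down sqlite3 sketch of the same pattern (columns simplified, not the real schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid, season, episode, "
             "scene_season, scene_episode, absolute_number, scene_absolute_number)")

def upsert_episode(showid, season, episode, scene_season, scene_episode, absolute_number, scene_absolute_number):
    # The subselect re-uses an existing primary key (or yields NULL for a new row),
    # so INSERT OR REPLACE behaves like an update when the episode already exists.
    conn.execute(
        "INSERT OR REPLACE INTO tv_episodes (episode_id, showid, season, episode, "
        "scene_season, scene_episode, absolute_number, scene_absolute_number) VALUES "
        "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),"
        "?,?,?,?,?,?,?)",
        [showid, season, episode,
         showid, season, episode, scene_season, scene_episode, absolute_number, scene_absolute_number])

upsert_episode(1, 1, 1, 1, 1, 1, 1)
upsert_episode(1, 1, 1, 2, 2, 13, 13)   # second save replaces rather than duplicates
print(conn.execute("SELECT COUNT(*), MAX(scene_absolute_number) FROM tv_episodes").fetchone())  # (1, 13)
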
@ -1817,8 +1882,11 @@ class TVEpisode(object):
"file_size": self.file_size,
"release_name": self.release_name,
"is_proper": self.is_proper,
- "absolute_number": self.absolute_number
- }
+ "scene_season": self.scene_season,
+ "scene_episode": self.scene_episode,
+ "absolute_number": self.absolute_number,
+ "scene_absolute_number": self.scene_absolute_number
+ }
controlValueDict = {"showid": self.show.indexerid,
"season": self.season,
"episode": self.episode}

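Here the new scene columns are added to the value dictionary that the DB layer turns into an update-or-insert keyed on the control dictionary. A rough sketch of what such a generic "control values / new values" helper could look like (assumed behaviour only; the project's own helper lives in its db module):

import sqlite3

def upsert(conn, table, new_values, control_values):
    # Try an UPDATE filtered by the control columns; INSERT when nothing matched.
    set_clause = ", ".join(col + " = ?" for col in new_values)
    where_clause = " AND ".join(col + " = ?" for col in control_values)
    cur = conn.execute("UPDATE %s SET %s WHERE %s" % (table, set_clause, where_clause),
                       list(new_values.values()) + list(control_values.values()))
    if cur.rowcount == 0:
        merged = dict(new_values, **control_values)
        cols = ", ".join(merged)
        marks = ", ".join("?" * len(merged))
        conn.execute("INSERT INTO %s (%s) VALUES (%s)" % (table, cols, marks), list(merged.values()))

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE tv_episodes (showid, season, episode, scene_season, scene_episode)")
control = {"showid": 1, "season": 2, "episode": 3}
upsert(conn, "tv_episodes", {"scene_season": 2, "scene_episode": 4}, control)   # inserts
upsert(conn, "tv_episodes", {"scene_season": 2, "scene_episode": 5}, control)   # updates in place
print(conn.execute("SELECT COUNT(*), scene_episode FROM tv_episodes").fetchone())  # (1, 5)
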
@ -1840,16 +1908,7 @@ class TVEpisode(object):
Returns: A string representing the episode's name and season/ep numbers
"""

- return self._format_pattern('%SN - %Sx%0E - %EN')
-
- def prettySceneName(self):
- """
- Returns the name of this episode in a "pretty" human-readable format. Used for logging
- and notifications and such.
-
- Returns: A string representing the episode's name and season/ep numbers
- """
- return self._format_pattern('%SN - %XSx%0XE - %EN')
+ return self._format_pattern('Indexer#:[%SN - %Sx%0E - %EN] | Scene#:[%SN - %XSx%0XE - %EN]')

def _ep_name(self):
"""

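The combined pattern prints both numbering schemes in one line, so log entries show the indexer and scene numbers side by side. A toy illustration of what the pattern expands to, with made-up values and a simplified stand-in for _format_pattern's replacement map (the real code drives the substitution with regexes and handles multi-episode formats):

def format_pattern(pattern, tokens):
    # Replace longer tokens first so '%SN' is not clobbered by '%S'.
    for key in sorted(tokens, key=len, reverse=True):
        pattern = pattern.replace(key, tokens[key])
    return pattern

tokens = {
    '%SN': 'Bleach',
    '%S': '6', '%0E': '05',
    '%XS': '6', '%0XE': '07',
    '%EN': 'Some Episode',
}
print(format_pattern('Indexer#:[%SN - %Sx%0E - %EN] | Scene#:[%SN - %XSx%0XE - %EN]', tokens))
# Indexer#:[Bleach - 6x05 - Some Episode] | Scene#:[Bleach - 6x07 - Some Episode]
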
@ -1954,7 +2013,7 @@ class TVEpisode(object):
'%0XS': '%02d' % self.scene_season,
'%XE': str(self.scene_episode),
'%0XE': '%02d' % self.scene_episode,
- '%AN': '%03d' % self.absolute_number,
+ '%AN': '%(#)03d' % {'#': self.absolute_number},
'%RN': release_name(self.release_name),
'%RG': release_group(self.release_name),
'%AD': str(self.airdate).replace('-', ' '),

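The absolute-number token switches from positional to mapping-keyed % formatting; both forms produce the same zero-padded string, the mapping form simply names the value being padded. A quick check, assuming an integer absolute number:

absolute_number = 7
print('%03d' % absolute_number)              # 007
print('%(#)03d' % {'#': absolute_number})    # 007, the mapping-keyed form used above
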
@ -2004,6 +2063,9 @@ class TVEpisode(object):
if self.show.air_by_date or self.show.sports:
result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-SiCKRAGE')
result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-sickrage')
+ elif self.show.is_anime:
+ result_name = result_name.replace('%RN', '%S.N.%AN.%E.N-SiCKRAGE')
+ result_name = result_name.replace('%rn', '%s.n.%an.%e.n-sickrage')
else:
result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-SiCKRAGE')
result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-sickrage')

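For anime shows the fallback release name is built around the padded absolute number instead of an SxxExx tag. With made-up values, the two patterns expand roughly like this (a sketch; in the real code the tokens are filled in later by the naming pass):

# Made-up values; the real expansion happens in the episode naming code.
anime_rn = ('%S.N.%AN.%E.N-SiCKRAGE'
            .replace('%S.N', 'Show.Name')
            .replace('%E.N', 'Episode.Name')
            .replace('%AN', '001'))
regular_rn = ('%S.N.S%0SE%0E.%E.N-SiCKRAGE'
              .replace('%S.N', 'Show.Name')
              .replace('%E.N', 'Episode.Name')
              .replace('%0S', '01')
              .replace('%0E', '05'))
print(anime_rn)    # Show.Name.001.Episode.Name-SiCKRAGE
print(regular_rn)  # Show.Name.S01E05.Episode.Name-SiCKRAGE
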
@ -2099,8 +2161,8 @@ class TVEpisode(object):

# fill out the template for this piece and then insert this piece into the actual pattern
cur_name_group_result = re.sub('(?i)(?x)' + regex_used, regex_replacement, cur_name_group)
- #cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
- #logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG)
+ # cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
+ # logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG)
result_name = result_name.replace(cur_name_group, cur_name_group_result)

result_name = self._format_string(result_name, replace_map)

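The substitution above prepends the inline flags (?i)(?x) to whichever episode-format regex was matched, making the replacement case-insensitive and whitespace-tolerant. A small standalone demo of that construction (the pattern and replacement here are made up, not the project's):

import re

regex_used = r'S (?P<season>\d+) E (?P<episode>\d+)'   # verbose-style spacing, hypothetical
regex_replacement = r'S\g<season>E\g<episode>'
print(re.sub('(?i)(?x)' + regex_used, regex_replacement, 'Show name s01e02 - title'))
# Show name S01E02 - title
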
@ -2202,7 +2264,8 @@ class TVEpisode(object):
self.location)

if self.show.subtitles and sickbeard.SUBTITLES_DIR != '':
- related_subs = postProcessor.PostProcessor(self.location).list_associated_files(sickbeard.SUBTITLES_DIR, subtitles_only=True)
+ related_subs = postProcessor.PostProcessor(self.location).list_associated_files(sickbeard.SUBTITLES_DIR,
+ subtitles_only=True)
absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename())

logger.log(u"Files associated to " + self.location + ": " + str(related_files), logger.DEBUG)

@ -2218,7 +2281,8 @@ class TVEpisode(object):
logger.log(str(self.indexerid) + u": Unable to rename file " + cur_related_file, logger.ERROR)

for cur_related_sub in related_subs:
- cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path,absolute_current_path_no_ext_length)
+ cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path,
+ absolute_current_path_no_ext_length)
if not cur_result:
logger.log(str(self.indexerid) + u": Unable to rename file " + cur_related_sub, logger.ERROR)

@ -2240,15 +2304,17 @@ class TVEpisode(object):
relEp.saveToDB()

def convertToSceneNumbering(self):
- (self.scene_season, self.scene_episode, self.scene_absolute_number) = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid,
- self.show.indexer,
- self.season,
- self.episode,
- self.absolute_number)
+ (self.scene_season, self.scene_episode,
+ self.scene_absolute_number) = sickbeard.scene_numbering.get_scene_numbering(self.show.indexerid,
+ self.show.indexer,
+ self.season,
+ self.episode,
+ self.absolute_number)

def convertToIndexerNumbering(self):
- (self.season, self.episode, self.absolute_number) = sickbeard.scene_numbering.get_indexer_numbering(self.show.indexerid,
- self.show.indexer,
- self.scene_season,
- self.scene_episode,
- self.scene_absolute_number)
+ (self.season, self.episode, self.absolute_number) = sickbeard.scene_numbering.get_indexer_numbering(
+ self.show.indexerid,
+ self.show.indexer,
+ self.scene_season,
+ self.scene_episode,
+ self.scene_absolute_number)

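convertToSceneNumbering and convertToIndexerNumbering are inverse lookups through the scene_numbering module. A toy round-trip with a hypothetical in-memory mapping shows the intent (the real lookups also carry indexer ids and absolute numbers and consult the database):

# Hypothetical mapping: (season, episode) -> (scene_season, scene_episode).
scene_map = {(1, 1): (1, 2), (1, 2): (1, 3)}
indexer_map = {v: k for k, v in scene_map.items()}

def to_scene(season, episode):
    return scene_map.get((season, episode), (season, episode))

def to_indexer(scene_season, scene_episode):
    return indexer_map.get((scene_season, scene_episode), (scene_season, scene_episode))

assert to_indexer(*to_scene(1, 1)) == (1, 1)   # round-trips back to indexer numbering
print(to_scene(1, 1), to_scene(2, 5))          # (1, 2) (2, 5)  -- unmapped numbers fall through
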
@ -60,6 +60,8 @@ from sickbeard.scene_exceptions import get_scene_exceptions
from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering, get_scene_numbering_for_show, \
get_xem_numbering_for_show

+ from sickbeard.blackandwhitelist import BlackAndWhiteList

from lib.dateutil import tz
from lib.unrar2 import RarFile, RarInfo

@ -3042,6 +3044,8 @@ class Home:
else:
t.sortedShowLists = [["Shows",sorted(sickbeard.showList, lambda x, y: cmp(titler(x.name), titler(y.name)))]]

+ t.bwl = BlackAndWhiteList(showObj.indexerid)

t.epCounts = epCounts
t.epCats = epCats

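The handlers in this file only touch a small surface of BlackAndWhiteList: per-show keyword dictionaries keyed by "global" or "release_group", plus getters and setters. A minimal in-memory sketch of that assumed interface (the real class persists its keywords to the database, so this is illustration only):

class BlackAndWhiteList(object):
    """Sketch of the interface used here; storage is a plain dict instead of the DB."""

    def __init__(self, show_id):
        self.show_id = show_id
        self.whiteDict = {}   # e.g. {"global": ["720p"], "release_group": ["HorribleSubs"]}
        self.blackDict = {}

    def set_white_keywords_for(self, range_name, keywords):
        self.whiteDict[range_name] = list(keywords)

    def set_black_keywords_for(self, range_name, keywords):
        self.blackDict[range_name] = list(keywords)

    def get_white_keywords_for(self, range_name):
        return self.whiteDict.get(range_name, [])

    def get_black_keywords_for(self, range_name):
        return self.blackDict.get(range_name, [])

bwl = BlackAndWhiteList(12345)
bwl.set_white_keywords_for("release_group", ["HorribleSubs", "Commie"])
print(bwl.get_white_keywords_for("release_group"))   # ['HorribleSubs', 'Commie']
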
@ -3077,7 +3081,7 @@ class Home:
def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[],
flatten_folders=None, paused=None, directCall=False, air_by_date=None, sports=None, dvdorder=None,
indexerLang=None, subtitles=None, archive_firstmatch=None, rls_ignore_words=None,
- rls_require_words=None, anime=None):
+ rls_require_words=None, anime=None, blackWords=None, whiteWords=None, blacklist=None, whitelist=None):

if show is None:
errString = "Invalid show ID: " + str(show)

@ -3101,7 +3105,24 @@ class Home:
t = PageTemplate(file="editShow.tmpl")
t.submenu = HomeMenu()

+ bwl = BlackAndWhiteList(showObj.indexerid)
+ t.whiteWords = ""
+ if "global" in bwl.whiteDict:
+ t.whiteWords = ", ".join(bwl.whiteDict["global"])
+ t.blackWords = ""
+ if "global" in bwl.blackDict:
+ t.blackWords = ", ".join(bwl.blackDict["global"])

+ if showObj.is_anime:
+ t.whitelist = []
+ if bwl.whiteDict.has_key("release_group"):
+ t.whitelist = bwl.whiteDict["release_group"]
+ t.blacklist = []
+ if bwl.blackDict.has_key("release_group"):
+ t.blacklist = bwl.blackDict["release_group"]

t.groups = []
if helpers.set_up_anidb_connection():
anime = adba.Anime(sickbeard.ADBA_CONNECTION, name=showObj.name)

@ -3151,6 +3172,55 @@ class Home:
else:
do_update_exceptions = True

+ bwl = BlackAndWhiteList(showObj.indexerid)
+ if whitelist:
+ whitelist = whitelist.split(",")
+ shortWhiteList = []
+ if helpers.set_up_anidb_connection():
+ for groupName in whitelist:
+ group = sickbeard.ADBA_CONNECTION.group(gname=groupName)
+ for line in group.datalines:
+ if line["shortname"]:
+ shortWhiteList.append(line["shortname"])
+ else:
+ if not groupName in shortWhiteList:
+ shortWhiteList.append(groupName)
+ else:
+ shortWhiteList = whitelist
+ bwl.set_white_keywords_for("release_group", shortWhiteList)
+ else:
+ bwl.set_white_keywords_for("release_group", [])

+ if blacklist:
+ blacklist = blacklist.split(",")
+ shortBlacklist = []
+ if helpers.set_up_anidb_connection():
+ for groupName in blacklist:
+ group = sickbeard.ADBA_CONNECTION.group(gname=groupName)
+ for line in group.datalines:
+ if line["shortname"]:
+ shortBlacklist.append(line["shortname"])
+ else:
+ if not groupName in shortBlacklist:
+ shortBlacklist.append(groupName)
+ else:
+ shortBlacklist = blacklist
+ bwl.set_black_keywords_for("release_group", shortBlacklist)
+ else:
+ bwl.set_black_keywords_for("release_group", [])

+ if whiteWords:
+ whiteWords = [x.strip() for x in whiteWords.split(",")]
+ bwl.set_white_keywords_for("global", whiteWords)
+ else:
+ bwl.set_white_keywords_for("global", [])

+ if blackWords:
+ blackWords = [x.strip() for x in blackWords.split(",")]
+ bwl.set_black_keywords_for("global", blackWords)
+ else:
+ bwl.set_black_keywords_for("global", [])

errors = []
with showObj.lock:
newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))

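The whitelist and blacklist branches above are near-duplicates: both split the submitted names, swap each for its AniDB short name when a connection is available, and fall back to the raw name otherwise. One way the shared part could be factored out (a sketch; the adba calls follow the usage above, and AniDB access is stubbed out here):

def resolve_release_groups(names, adba_connection=None):
    """Return AniDB short names for the given group names, falling back to the
    name itself when no connection or no short name is available."""
    resolved = []
    for group_name in names:
        short_names = []
        if adba_connection is not None:
            group = adba_connection.group(gname=group_name)          # as used above
            short_names = [line["shortname"] for line in group.datalines if line["shortname"]]
        if short_names:
            resolved.extend(short_names)
        elif group_name not in resolved:
            resolved.append(group_name)
    return resolved

# Without an AniDB connection the names pass straight through:
print(resolve_release_groups("HorribleSubs,Commie".split(",")))   # ['HorribleSubs', 'Commie']
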