Mirror of https://github.com/SickGear/SickGear.git (synced 2024-12-03 18:03:37 +00:00)
Re-write of Indexer API wrapper
Re-write of New Show search function. Re-write of Existing Show search helper function for determining indexer/indexerid. Massive code cleanup, and more bugs found and fixed. Indexer code fully modularized for future proofing.
This commit is contained in:
parent 108df09382, commit 95d7d728e0
128 changed files with 5314 additions and 4198 deletions
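
The hunks below replace hard-coded TheTVDB/TVRage branches with a sickbeard.indexerApi wrapper that exposes per-indexer settings (name, config["show_url"], config["icon"]) and, when called with no argument, an indexers id-to-name mapping. A minimal sketch of the shape such a wrapper would need to satisfy these call sites follows; the class body, numeric ids, and URL values are illustrative assumptions inferred from the diff, not the actual implementation:

    # Hedged sketch only: inferred from how templates below call
    # sickbeard.indexerApi(...); not the real module from this commit.
    class indexerApi(object):
        # assumed registry: numeric indexer id -> display name
        indexers = {1: 'theTVDB', 2: 'TVRage'}

        # assumed per-indexer config, keyed the way the templates read it
        _configs = {
            1: {'name': 'theTVDB', 'icon': 'thetvdb16.png',
                'show_url': 'http://thetvdb.com/?tab=series&id='},
            2: {'name': 'TVRage', 'icon': 'tvrage16.png',
                'show_url': 'http://tvrage.com/shows/id-'},
        }

        def __init__(self, indexer=None):
            self.indexer = indexer

        @property
        def name(self):
            return self._configs[self.indexer]['name']

        @property
        def config(self):
            return self._configs[self.indexer]

With a wrapper like this, a template can build any indexer's show link as indexerApi(indexer).config["show_url"] + str(showid) instead of branching on 'Tvdb' vs 'TVRage', which is exactly the substitution the hunks below make.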
@@ -75,7 +75,7 @@ def loadShowsFromDB():
     for sqlShow in sqlResults:
         try:
-            curShow = TVShow(sqlShow["indexer"], int(sqlShow["indexer_id"]))
+            curShow = TVShow(int(sqlShow["indexer"]), int(sqlShow["indexer_id"]))
             sickbeard.showList.append(curShow)
         except Exception, e:
             logger.log(u"There was an error creating the show in " + sqlShow["location"] + ": " + str(e).decode('utf-8'), logger.ERROR)

@@ -167,11 +167,7 @@
 <a href="http://www.imdb.com/title/${cur_result["imdb_id"]}" onclick="window.open(this.href, '_blank'); return false;" title="http://www.imdb.com/title/${cur_result["imdb_id"]}"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" />
 #end if
 </td>
-#if 'Tvdb' in $cur_result["indexer"]:
-<td align="center"><a href="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" /></a></td>
-#else
-<td align="center"><a href="http://tvrage.com/shows/id-$[cur_result["showid"]]" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://tvrage.com/shows/id-$[cur_result["showid"]]"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" /></a></td>
-#end if
+<td align="center"><a href="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}"><img alt="["'+$sickbeard.indexerApi($cur_result["indexer"]).name+'"]" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($cur_result["indexer"]).config["icon"]" /></a></td>
 <td align="center">
 <a href="$sbRoot/home/searchEpisode?show=${cur_result["showid"]}&season=$cur_result["season"]&episode=$cur_result["episode"]" title="Manual Search" id="forceUpdate-${cur_result["showid"]}" class="forceUpdate epSearch"><img alt="[search]" height="16" width="16" src="$sbRoot/images/search32.png" id="forceUpdateImage-${cur_result["showid"]}" /></a>
 </td>

@@ -307,11 +303,7 @@
 #if $cur_result["imdb_id"]:
 <a href="http://www.imdb.com/title/${cur_result["imdb_id"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/${cur_result["imdb_id"]}"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" />
 #end if
-#if "Tvdb" in $cur_result["indexer"]:
-<a href="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://thetvdb.com/?tab=series&id=${cur_result["showid"]}"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" /></a>
-#else
-<a href="http://tvrage.com/shows/id-$[cur_result["showid"]]" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://tvrage.com/shows/id-$[cur_result["showid"]]"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" /></a>
-#end if
+<a href="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="$sickbeard.indexerApi($cur_result["indexer"]).config["show_url"]${cur_result["showid"]}"><img alt="$sickbeard.indexerApi($cur_result["indexer"]).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($cur_result["indexer"]).config["icon"]" /></a>
 <span><a href="$sbRoot/home/searchEpisode?show=${cur_result["showid"]}&season=$cur_result["season"]&episode=$cur_result["episode"]" title="Manual Search" id="forceUpdate-${cur_result["showid"]}" class="epSearch forceUpdate"><img alt="[search]" height="16" width="16" src="$sbRoot/images/search32.png" id="forceUpdateImage-${cur_result["showid"]}" /></a></span>
 </span>
 </div>

@@ -50,23 +50,14 @@
 - $show.genre[1:-1].replace('|',' | ')
 #end if
 <span class="tvshowLink" style="vertical-align: text-top">
-#if "Tvdb" in $show.indexer
-<a href="http://www.thetvdb.com/?tab=series&id=$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="http://www.thetvdb.com/?tab=series&id=$show.indexerid"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" style="margin-top: -1px;"/></a>
-#else
-<a href="http://www.tvrage.com/shows/id-$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="http://www.tvrage.com/shows/id-$show.indexerid"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" style="margin-top: -1px;"/></a>
-#end if
+<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
 </span>
 #else
 <img src="$sbRoot/images/flags/${$show.imdb_info['country_codes']}.png" width="16" height="11" style="margin-top: 3px; margin-left: 3px" /> ($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ')
 <span class="tvshowLink" style="vertical-align: text-top">
 <a href="http://www.imdb.com/title/$show.imdbid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/$show.imdbid"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" style="margin-top: -1px;"/>
-#if "Tvdb" in $show.indexer
-<a href="http://www.thetvdb.com/?tab=series&id=$show.indexerid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.thetvdb.com/?tab=series&id=$show.indexerid"><img alt="[tvdb]" height="16" width="16" src="$sbRoot/images/thetvdb16.png" style="margin-top: -1px;"/></a>
-#else
-<a href="http://www.tvrage.com/shows/id-$show.indexerid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.tvrage.com/shows/id-$show.indexerid"><img alt="[tvrage]" height="16" width="16" src="$sbRoot/images/tvrage16.png" style="margin-top: -1px;"/></a>
-#end if
+<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
 </span>
 
 #end if
 </span>
 #if $seasonResults:

@@ -112,8 +112,8 @@ This <b>DOES NOT</b> allow Sick Beard to download non-english TV episodes!<br />
 <input type="checkbox" name="dvdorder" #if $show.dvdorder == 1 then "checked=\"checked\"" else ""# /><br/>
 (check this if you wish to use the DVD order instead of the Airing order)
 <br/><br/>
-<b>Archive on first match: </b>
 #if $bestQualities
+<b>Archive on first match: </b>
 <input type="checkbox" name="archive_firstmatch" #if $show.archive_firstmatch == 1 then "checked=\"checked\"" else ""# /><br />
 (check this to have the episode archived after the first best match is found from your archive quality list)
 <br />

@@ -1,7 +1,5 @@
 #import sickbeard
-#from sickbeard.common import indexerStrings
 
-#set $rowidx = 0
 <table id="addRootDirTable" class="sickbeardTable tablesorter">
 <thead><tr><th width="1%"><input type="checkbox" id="checkAll" checked=checked></th><th>Directory</th><th width="20%">Show Name (tvshow.nfo)<th width="20%">Indexer</td></tr></thead>
 <tfoot>

@@ -16,27 +14,23 @@
 #end if
 
 #set $show_id = $curDir['dir']
-#set $indexer = 'Tvdb'
+#set $indexer = 1
 #if $curDir['existing_info'][0]:
-#set $show_id = $show_id + '|' + $str($curDir['existing_info'][0]) + '|' + str($curDir['existing_info'][1])
-#set $indexer = $str($curDir['existing_info'][2])
+#set $show_id = $show_id + '|' + $str($curDir['existing_info'][0]) + '|' + $str($curDir['existing_info'][1])
+#set $indexer = $curDir['existing_info'][2]
 #end if
 
-#set $rowidx = $rowidx + 1
+<tr>
 
-<tr id=$rowidx>
 <td><input type="checkbox" id="$show_id" class="dirCheck" checked=checked></td>
 <td><label for="$show_id">$curDir['display_dir']</label></td>
-#if 'Tvdb' in $indexer
-<td>#if $curDir['existing_info'][0] and $curDir['existing_info'][1] then '<a href="http://thetvdb.com/?tab=series&id='+$str($curDir['existing_info'][0])+'">'+$curDir['existing_info'][1]+'</a>' else "?"#</td>
-#elif 'TVRage' in $indexer
-<td>#if $curDir['existing_info'][1] then '<a href="http://tvrage.com/shows/id-'+$str($curDir['existing_info'][0])+'">'+$curDir['existing_info'][1]+'</a>' else "?"#</td>
-#else
-<td>$curDir['existing_info'][1]</td>
+#if $curDir['existing_info'][1]:
+<td><a href="$sickbeard.indexerApi($indexer).config["show_url"]$curDir['existing_info'][0]">$curDir['existing_info'][1]</a></td>
+#else:
+<td>?</td>
 #end if
 <td>
 <select name="indexer">
-#for $curIndexer in sorted($indexerStrings.items(), key=lambda x: x[1]):
+#for $curIndexer in $sickbeard.indexerApi().indexers.items():
 <option value="$curIndexer[0]" #if $curIndexer[0] == $indexer then "selected=\"selected\"" else ""#>$curIndexer[1]</option>
 #end for
 </select>

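Note that $indexer is now a numeric id (defaulting to 1) rather than the string 'Tvdb', which is why the option loop can compare $curIndexer[0] == $indexer directly. Building on the hedged wrapper sketch above, the dropdown loop relies only on the indexers mapping; the ids and names here remain illustrative assumptions:

    # Sketch of what the template's #for loop consumes; indexerApi is the
    # assumed wrapper from the sketch above, not the real module.
    api = indexerApi()
    for indexer_id, indexer_name in sorted(api.indexers.items()):
        # renders one <option value="1">theTVDB</option>-style entry per indexer
        print('<option value="%s">%s</option>' % (indexer_id, indexer_name))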
@@ -27,15 +27,11 @@
 <form id="addShowForm" method="post" action="$sbRoot/home/addShows/addNewShow" accept-charset="utf-8">
 
 <fieldset class="sectionwrap">
-<legend class="legendStep">Find a show on the TVDB and TVRAGE</legend>
+<legend class="legendStep">Find a show on the TVDB or TVRAGE</legend>
 
 <div class="stepDiv">
 #if $use_provided_info:
-#if 'Tvdb' in $provided_indexer
-Show retrieved from existing metadata: <a href="http://thetvdb.com/?tab=series&id=$provided_indexer_id">$provided_indexer_name</a>
-#else
-Show retrieved from existing metadata: <a href="http://tvrage.com/shows/id-$provided_indexer_id">$provided_indexer_name</a>
-#end if
+Show retrieved from existing metadata: <a href="$sickbeard.indexerApi($provided_indexer).config["show_url"]$provided_indexer_id">$provided_indexer_name</a>
 <input type="hidden" name="indexerLang" value="en" />
 <input type="hidden" name="whichSeries" value="$provided_indexer_id" />
 <input type="hidden" id="providedName" value="$provided_indexer_name" />

@@ -1,5 +1,4 @@
 #import sickbeard
-#from sickbeard.common import indexerStrings
 #set global $header="Post Processing"
 #set global $title="Post Processing"
 

@@ -18,8 +17,8 @@
 <b>Enter the folder containing the episode:</b> <input type="text" name="dir" id="episodeDir" size="50" /><br/>
 <b>Show Indexer to be used:</b>
 <select name="indexer" id="indexer" class="indexer">
-#for $curIndexer in sorted($indexerStrings.items(), key=lambda x: x[1]):
-<option value="$curIndexer[0]" #if $curIndexer[0] in $indexerStrings then "selected=\"selected\"" else ""#>$curIndexer[1]</option>
+#for $curIndexer in sorted($sickbeard.indexerApi().indexers.items(), key=lambda x: x[1]):
+<option value="$curIndexer[0]" #if $curIndexer[0] in $sickbeard.indexerApi().indexers then "selected=\"selected\"" else ""#>$curIndexer[1]</option>
 #end for
 </select>
 <br/>

@@ -39,7 +38,6 @@
 <b>Mark Dir/Files as priority download:</b> <input id="is_priority" name="is_priority" type="checkbox">
 <span style="line-height: 0; font-size: 12px;"><i>(Check it to replace the file even if it exists at higher quality)</i></span><br/>
 #if $sickbeard.USE_FAILED_DOWNLOADS:
-
 <b>Mark download as failed:</b>  
 <input id="failed" name="failed" type="checkbox"><br />
 #end if

@@ -47,7 +45,6 @@
 <input id="submit" class="btn btn-primary" type="submit" value="Process" />
 </form>
-
 
 <script type="text/javascript" charset="utf-8">
 <!--
 jQuery('#episodeDir').fileBrowser({ title: 'Select Unprocessed Episode Folder', key: 'postprocessPath' });

@@ -115,7 +115,7 @@ $(document).ready(function(){
     $("#checkboxControls input").change(function(e){
         var whichClass = $(this).attr('id')
         $(this).showHideRows(whichClass)
-        return
+
         $('tr.'+whichClass).each(function(i){
             $(this).toggle();
         });

@@ -54,28 +54,28 @@ $(document).ready(function () {
 
             var whichSeries = obj.join('|');
 
 
             resultStr += '<input type="radio" id="whichSeries" name="whichSeries" value="' + whichSeries + '"' + checked + ' /> ';
-            if (obj[0] == 'Tvdb' && data.langid && data.langid != "") {
-                resultStr += '<a href="http://thetvdb.com/?tab=series&id=' + obj[1] + '&lid=' + data.langid + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[2] + '</b></a>';
-            } else if (obj[0] == 'Tvdb') {
-                resultStr += '<a href="http://thetvdb.com/?tab=series&id=' + obj[1] + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[2] + '</b></a>';
+            if (data.langid && data.langid != "") {
+                resultStr += '<a href="'+ obj[2] + obj[3] + '&lid=' + data.langid + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[4] + '</b></a>';
             } else {
-                resultStr += '<a href="http://tvrage.com/shows/id-' + obj[1] + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[2] + '</b></a>';
+                resultStr += '<a href="'+ obj[2] + obj[3] + '" onclick=\"window.open(this.href, \'_blank\'); return false;\" ><b>' + obj[4] + '</b></a>';
             }
 
-            if (obj[3] !== null) {
-                var startDate = new Date(obj[3]);
+            if (obj[5] !== null) {
+                var startDate = new Date(obj[5]);
                 var today = new Date();
                 if (startDate > today) {
-                    resultStr += ' (will debut on ' + obj[3] + ')';
+                    resultStr += ' (will debut on ' + obj[5] + ')';
                 } else {
-                    resultStr += ' (started on ' + obj[3] + ')';
+                    resultStr += ' (started on ' + obj[5] + ')';
                 }
             }
 
             if (obj[0] !== null) {
                 resultStr += ' [' + obj[0] + ']';
             }
 
             resultStr += '<br />';
         });
         resultStr += '</ul>';

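The index shift above (obj[2]/obj[3] for the link, obj[4] for the name, obj[5] for the air date) implies the search endpoint now returns each result as a pipe-joined tuple of roughly: indexer name, indexer id, show_url base, series id, series name, first-aired date. A hedged sketch of how the server side might assemble that value; the field order is inferred from the subscripts in the script, not confirmed anywhere in this diff:

    # Assumed field order, reconstructed from the obj[...] subscripts above:
    # obj[0] indexer name, obj[1] indexer id, obj[2] show_url base,
    # obj[3] series id, obj[4] series name, obj[5] first-aired date.
    def build_which_series(indexer_name, indexer_id, show_url, series_id, name, first_aired):
        fields = [indexer_name, indexer_id, show_url, series_id, name, first_aired]
        return '|'.join(str(f) for f in fields)

    # e.g. 'theTVDB|1|http://thetvdb.com/?tab=series&id=|76156|Scrubs|2001-10-02'
    # so that obj[2] + obj[3] rebuilds the full show URL client-side.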
@@ -146,7 +146,7 @@ $(document).ready(function () {
     var show_name, sep_char;
     // if they've picked a radio button then use that
     if ($('input:radio[name=whichSeries]:checked').length) {
-        show_name = $('input:radio[name=whichSeries]:checked').val().split('|')[2];
+        show_name = $('input:radio[name=whichSeries]:checked').val().split('|')[4];
     }
     // if we provided a show in the hidden field, use that
     else if ($('input:hidden[name=whichSeries]').length && $('input:hidden[name=whichSeries]').val().length) {

@@ -46,12 +46,15 @@ from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
 
 # Cached Session Handler
 from lib.httpcache import CachingHTTPAdapter
 
 s = requests.Session()
 s.mount('http://', CachingHTTPAdapter())
 
+
 def log():
     return logging.getLogger("tvdb_api")
 
+
+
 class ShowContainer(dict):
     """Simple dict that holds a series of Show instances
     """

@@ -81,6 +84,7 @@ class ShowContainer(dict):
 class Show(dict):
     """Holds a dict of seasons, and show data.
     """
+
     def __init__(self):
         dict.__init__(self)
         self.data = {}

@@ -126,7 +130,7 @@ class Show(dict):
             raise tvdb_episodenotfound("Could not find any episodes that aired on %s" % date)
         return ret
 
-    def search(self, term = None, key = None):
+    def search(self, term=None, key=None):
         """
         Search all episodes in show. Can search all data, or a specific key (for
         example, episodename)

@@ -179,7 +183,7 @@ class Show(dict):
         """
         results = []
         for cur_season in self.values():
-            searchresult = cur_season.search(term = term, key = key)
+            searchresult = cur_season.search(term=term, key=key)
             if len(searchresult) != 0:
                 results.extend(searchresult)
 

@@ -187,7 +191,7 @@ class Show(dict):
 
 
 class Season(dict):
-    def __init__(self, show = None):
+    def __init__(self, show=None):
         """The show attribute points to the parent show
         """
         self.show = show

@@ -208,7 +212,7 @@ class Season(dict):
         else:
             return dict.__getitem__(self, episode_number)
 
-    def search(self, term = None, key = None):
+    def search(self, term=None, key=None):
         """Search all episodes in season, returns a list of matching Episode
         instances.
 

@@ -221,7 +225,7 @@ class Season(dict):
         """
         results = []
         for ep in self.values():
-            searchresult = ep.search(term = term, key = key)
+            searchresult = ep.search(term=term, key=key)
             if searchresult is not None:
                 results.append(
                     searchresult

@@ -230,7 +234,7 @@ class Season(dict):
 
 
 class Episode(dict):
-    def __init__(self, season = None):
+    def __init__(self, season=None):
         """The season attribute points to the parent season
         """
         self.season = season

@@ -255,7 +259,7 @@ class Episode(dict):
         except KeyError:
             raise tvdb_attributenotfound("Cannot find attribute %s" % (repr(key)))
 
-    def search(self, term = None, key = None):
+    def search(self, term=None, key=None):
         """Search episode data for term, if it matches, return the Episode (self).
         The key parameter can be used to limit the search to a specific element,
         for example, episodename.

@@ -286,7 +290,7 @@ class Episode(dict):
             if key is not None and cur_key != key:
                 # Do not search this key
                 continue
-            if cur_value.find( unicode(term).lower() ) > -1:
+            if cur_value.find(unicode(term).lower()) > -1:
                 return self
 
 

@@ -305,6 +309,7 @@ class Actor(dict):
     role,
     sortorder
     """
+
     def __repr__(self):
         return "<Actor \"%s\">" % (self.get("name"))
 

@@ -315,17 +320,18 @@ class Tvdb:
     >>> t['Scrubs'][1][24]['episodename']
     u'My Last Day'
     """
+
     def __init__(self,
-                 interactive = False,
-                 select_first = False,
-                 debug = False,
-                 cache = True,
-                 banners = False,
-                 actors = False,
-                 custom_ui = None,
-                 language = None,
-                 search_all_languages = False,
-                 apikey = None,
+                 interactive=False,
+                 select_first=False,
+                 debug=False,
+                 cache=True,
+                 banners=False,
+                 actors=False,
+                 custom_ui=None,
+                 language=None,
+                 search_all_languages=False,
+                 apikey=None,
                  forceConnect=False,
                  useZip=False,
                  dvdorder=False):

@@ -454,8 +460,8 @@ class Tvdb:
         # Hard-coded here as it is realtively static, and saves another HTTP request, as
         # recommended on http://thetvdb.com/wiki/index.php/API:languages.xml
         self.config['valid_languages'] = [
-            "da", "fi", "nl", "de", "it", "es", "fr","pl", "hu","el","tr",
-            "ru","he","ja","pt","zh","cs","sl", "hr","ko","en","sv","no"
+            "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr",
+            "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"
         ]
 
         # thetvdb.com should be based around numeric language codes,

|
||||||
if sid not in self.shows:
|
if sid not in self.shows:
|
||||||
self.shows[sid] = Show()
|
self.shows[sid] = Show()
|
||||||
if seas not in self.shows[sid]:
|
if seas not in self.shows[sid]:
|
||||||
self.shows[sid][seas] = Season(show = self.shows[sid])
|
self.shows[sid][seas] = Season(show=self.shows[sid])
|
||||||
if ep not in self.shows[sid][seas]:
|
if ep not in self.shows[sid][seas]:
|
||||||
self.shows[sid][seas][ep] = Episode(season = self.shows[sid][seas])
|
self.shows[sid][seas][ep] = Episode(season=self.shows[sid][seas])
|
||||||
self.shows[sid][seas][ep][attrib] = value
|
self.shows[sid][seas][ep][attrib] = value
|
||||||
|
|
||||||
def _setShowData(self, sid, key, value):
|
def _setShowData(self, sid, key, value):
|
||||||
|
@@ -610,6 +616,7 @@ class Tvdb:
         - Replaces &amp; with &
         - Trailing whitespace
         """
+        if isinstance(data, str):
         data = data.replace(u"&amp;", u"&")
         data = data.strip()
         return data

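A hedged reading of the guard added above: once numeric fields such as 'id' are cast to int elsewhere in this commit, _cleanData can be handed non-string values, so the entity replacement should only run for strings. The sketch below shows the safe shape; whether strip() sits inside the guard in the real code is not visible in this diff, and note that in Python 2 isinstance(data, str) matches bytestrings only, not unicode:

    # Illustrative sketch, not the committed method body.
    def _cleanData(data):
        if isinstance(data, str):
            # string-only cleanup; ints and None pass through untouched
            data = data.replace(u"&amp;", u"&")
            data = data.strip()
        return data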
@@ -618,19 +625,11 @@ class Tvdb:
         """This searches TheTVDB.com for the series name
         and returns the result list
         """
-        series = urllib.quote(series.encode("utf-8"))
+        series = series.encode("utf-8")
         log().debug("Searching for show %s" % series)
         self.config['params_getSeries']['seriesname'] = series
         seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
-        allSeries = []
-        for series in seriesEt:
-            result = dict((k.tag.lower(), k.text) for k in series.getchildren())
-            result['id'] = int(result['id'])
-            result['lid'] = self.config['langabbv_to_id'][result['language']]
-            if 'aliasnames' in result:
-                result['aliasnames'] = result['aliasnames'].split("|")
-            log().debug('Found series %(seriesname)s' % result)
-            allSeries.append(result)
+        allSeries = [dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt]
 
         return allSeries
 

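The comprehension above is the old loop minus its post-processing: each result element still becomes a dict of lowercased tag to text, but the int(result['id']) cast, the 'lid' lookup, and the 'aliasnames' split no longer happen here (the id cast moves into _getShowData, per the hunks below). A minimal sketch of the equivalence, using a stand-in element type since the real input is an ElementTree result:

    # Stand-in element with .tag/.text/.getchildren(), mimicking ElementTree
    # nodes, purely to show what the comprehension produces.
    class FakeEl(object):
        def __init__(self, tag, text, children=()):
            self.tag, self.text, self._children = tag, text, list(children)
        def getchildren(self):
            return self._children

    series_el = FakeEl('Series', None, [FakeEl('id', '76156'),
                                        FakeEl('SeriesName', 'Scrubs')])
    allSeries = [dict((s.tag.lower(), s.text) for s in x.getchildren())
                 for x in [series_el]]
    # -> [{'id': '76156', 'seriesname': 'Scrubs'}]; note 'id' stays a string
    # here, unlike the old loop which cast it to int inline.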
@@ -648,14 +647,14 @@ class Tvdb:
 
         if self.config['custom_ui'] is not None:
             log().debug("Using custom UI %s" % (repr(self.config['custom_ui'])))
-            ui = self.config['custom_ui'](config = self.config)
+            ui = self.config['custom_ui'](config=self.config)
         else:
             if not self.config['interactive']:
                 log().debug('Auto-selecting first search result using BaseUI')
-                ui = BaseUI(config = self.config)
+                ui = BaseUI(config=self.config)
             else:
                 log().debug('Interactively selecting show using ConsoleUI')
-                ui = ConsoleUI(config = self.config)
+                ui = ConsoleUI(config=self.config)
 
         return ui.selectSeries(allSeries)
 

@@ -678,7 +677,7 @@ class Tvdb:
         This interface will be improved in future versions.
         """
         log().debug('Getting season banners for %s' % (sid))
-        bannersEt = self._getetsrc( self.config['url_seriesBanner'] % (sid) )
+        bannersEt = self._getetsrc(self.config['url_seriesBanner'] % (sid))
         banners = {}
         for cur_banner in bannersEt.findall('Banner'):
             bid = cur_banner.find('id').text

@@ -753,7 +752,7 @@ class Tvdb:
             cur_actors.append(curActor)
         self._setShowData(sid, '_actors', cur_actors)
 
-    def _getShowData(self, sid, language):
+    def _getShowData(self, sid, language, seriesSearch=False):
         """Takes a series ID, gets the epInfo URL and parses the TVDB
         XML file into the shows dict in layout:
         shows[series_id][season_number][episode_number]

@@ -778,17 +777,27 @@ class Tvdb:
         seriesInfoEt = self._getetsrc(
             self.config['url_seriesInfo'] % (sid, getShowInLanguage)
         )
 
+        if seriesInfoEt is None: return False
         for curInfo in seriesInfoEt.findall("Series")[0]:
             tag = curInfo.tag.lower()
             value = curInfo.text
 
+            if tag == 'seriesname' and value is None:
+                return False
+
             if value is not None:
+                if tag == 'id':
+                    value = int(value)
+
                 if tag in ['banner', 'fanart', 'poster']:
                     value = self.config['url_artworkPrefix'] % (value)
                 else:
                     value = self._cleanData(value)
 
             self._setShowData(sid, tag, value)
+
+        if seriesSearch:
+            return True
 
         # Parse banners
         if self.config['banners_enabled']:

@@ -806,7 +815,7 @@ class Tvdb:
         else:
             url = self.config['url_epInfo'] % (sid, language)
 
-        epsEt = self._getetsrc( url, language=language)
+        epsEt = self._getetsrc(url, language=language)
 
         for cur_ep in epsEt.findall("Episode"):
 

@@ -844,28 +853,34 @@ class Tvdb:
                 tag = cur_item.tag.lower()
                 value = cur_item.text
                 if value is not None:
+                    if tag == 'id':
+                        value = int(value)
+
                     if tag == 'filename':
                         value = self.config['url_artworkPrefix'] % (value)
                     else:
                         value = self._cleanData(value)
                 self._setItem(sid, seas_no, ep_no, tag, value)
 
+        return True
 
     def _nameToSid(self, name):
         """Takes show name, returns the correct series ID (if the show has
         already been grabbed), or grabs all episodes and returns
         the correct SID.
         """
+        sid = set()
         if name in self.corrections:
-            log().debug('Correcting %s to %s' % (name, self.corrections[name]) )
+            log().debug('Correcting %s to %s' % (name, self.corrections[name]))
             sid = self.corrections[name]
         else:
             log().debug('Getting show %s' % (name))
-            selected_series = self._getSeries( name )
-            sname, sid = selected_series['seriesname'], selected_series['id']
-            log().debug('Got %(seriesname)s, id %(id)s' % selected_series)
-            self.corrections[name] = sid
-            self._getShowData(selected_series['id'], selected_series['language'])
+            selected_series = self._getSeries(name)
+            if isinstance(selected_series, dict):
+                selected_series = [selected_series]
+            [sid.add(int(x['id'])) for x in selected_series if
+             self._getShowData(int(x['id']), self.config['language'], seriesSearch=True)]
+            [self.corrections.update({x['seriesname']: int(x['id'])}) for x in selected_series]
 
         return sid
 

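With these changes _nameToSid returns a set of series ids rather than a single id: every search hit whose full data loads successfully (_getShowData(..., seriesSearch=True) returning True) is added, and the name-to-id corrections cache is updated per result. A hedged sketch of the same flow written as plain loops, which may be easier to follow than the list comprehensions used above for their side effects; note also that the cached branch still yields the single cached id rather than a set, which callers iterating the result may need to handle:

    # Equivalent control flow, written without the side-effecting list
    # comprehensions; self/_getSeries/_getShowData are the class's own members.
    def _nameToSid(self, name):
        sid = set()
        if name in self.corrections:
            sid = self.corrections[name]
        else:
            selected_series = self._getSeries(name)
            if isinstance(selected_series, dict):  # single hit -> list of one
                selected_series = [selected_series]
            for x in selected_series:
                # only keep ids whose show data actually loaded
                if self._getShowData(int(x['id']), self.config['language'], seriesSearch=True):
                    sid.add(int(x['id']))
                self.corrections[x['seriesname']] = int(x['id'])
        return sid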
@@ -880,9 +895,8 @@ class Tvdb:
             return self.shows[key]
 
         key = key.lower() # make key lower case
-        sid = self._nameToSid(key)
-        log().debug('Got series id %s' % (sid))
-        return self.shows[sid]
+        sids = self._nameToSid(key)
+        return [self.shows[sid] for sid in sids]
 
     def __repr__(self):
         return str(self.shows)

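Callers should note the interface change this implies: indexing a Tvdb instance by name now yields a list of Show objects (one per matching series id) instead of a single Show. A hedged usage sketch; the show name and field access are illustrative:

    # Hypothetical usage after this change; 'scrubs' is illustrative.
    t = Tvdb()
    for show in t['scrubs']:           # name lookup: list, one entry per match
        print show['seriesname']       # each entry still behaves like before
    # Lookups by a key already in self.shows (e.g. a cached sid) still return
    # the single Show via the early return above.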
@@ -893,11 +907,13 @@ def main():
     grabs an episode name interactively.
     """
     import logging
 
     logging.basicConfig(level=logging.DEBUG)
 
     tvdb_instance = Tvdb(interactive=True, cache=False)
     print tvdb_instance['Lost']['seriesname']
     print tvdb_instance['Lost'][1][4]['episodename']
 
+
+
 if __name__ == '__main__':
     main()

@@ -29,6 +29,7 @@ try:
 except ImportError:
     import xml.etree.ElementTree as ElementTree
 
+from collections import defaultdict
 from lib.dateutil.parser import parse
 from lib import requests
 

@@ -318,8 +319,8 @@ class TVRage:
 
         self.config['base_url'] = "http://services.tvrage.com"
 
-        self.config['url_getSeries'] = u"%(base_url)s/myfeeds/search.php" % self.config
-        self.config['params_getSeries'] = {"key": self.config['apikey'], "show": ""}
+        self.config['url_getSeries'] = u"%(base_url)s/feeds/search.php" % self.config
+        self.config['params_getSeries'] = {"show": ""}
 
         self.config['url_epInfo'] = u"%(base_url)s/myfeeds/episode_list.php" % self.config
         self.config['params_epInfo'] = {"key": self.config['apikey'], "sid": ""}

@@ -473,6 +474,7 @@ class TVRage:
         - Replaces &amp; with &
         - Trailing whitespace
         """
+        if isinstance(data, str):
         data = data.replace(u"&amp;", u"&")
         data = data.strip()
         return data

@@ -481,19 +483,11 @@ class TVRage:
         """This searches tvrage.com for the series name
         and returns the result list
         """
-        series = urllib.quote(series.encode("utf-8"))
+        series = series.encode("utf-8")
         log().debug("Searching for show %s" % series)
         self.config['params_getSeries']['show'] = series
         seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
-        allSeries = []
-        seriesResult = {}
-        for series in seriesEt:
-            for k in series.getchildren():
-                seriesResult.setdefault(k.tag.lower(), k.text)
-
-            seriesResult['id'] = int(seriesResult['id'])
-            log().debug('Found series %s' % seriesResult['seriesname'])
-            allSeries.append(seriesResult)
+        allSeries = [dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt]
 
         return allSeries
 

@@ -518,7 +512,7 @@ class TVRage:
 
         return ui.selectSeries(allSeries)
 
-    def _getShowData(self, sid):
+    def _getShowData(self, sid, seriesSearch=False):
         """Takes a series ID, gets the epInfo URL and parses the TVRAGE
         XML file into the shows dict in layout:
         shows[series_id][season_number][episode_number]

@@ -532,14 +526,22 @@ class TVRage:
             self.config['params_seriesInfo']
         )
 
+        if seriesInfoEt is None: return False
         for curInfo in seriesInfoEt:
             tag = curInfo.tag.lower()
             value = curInfo.text
 
+            if tag == 'seriesname' and value is None:
+                return False
+
+            if tag == 'id':
+                value = int(value)
+
             if value is not None:
                 value = self._cleanData(value)
 
             self._setShowData(sid, tag, value)
+        if seriesSearch: return True
 
         try:
             # Parse genre data

@@ -572,28 +574,32 @@ class TVRage:
 
                 value = cur_item.text
                 if value is not None:
+                    if tag == 'id':
+                        value = int(value)
+
                     value = self._cleanData(value)
 
                 self._setItem(sid, seas_no, ep_no, tag, value)
             except:
                 continue
+        return True
 
     def _nameToSid(self, name):
         """Takes show name, returns the correct series ID (if the show has
         already been grabbed), or grabs all episodes and returns
         the correct SID.
         """
+        sid = set()
         if name in self.corrections:
             log().debug('Correcting %s to %s' % (name, self.corrections[name]) )
             sid = self.corrections[name]
         else:
             log().debug('Getting show %s' % (name))
             selected_series = self._getSeries( name )
-            sname, sid = selected_series['seriesname'], selected_series['id']
-            log().debug('Got %(seriesname)s, id %(id)s' % selected_series)
-            self.corrections[name] = sid
-            self._getShowData(selected_series['id'])
+            if isinstance(selected_series, dict):
+                selected_series = [selected_series]
+            [sid.add(int(x['id'])) for x in selected_series if self._getShowData(int(x['id']), seriesSearch=True)]
+            [self.corrections.update({x['seriesname']:int(x['id'])}) for x in selected_series]
 
         return sid
 

@@ -608,9 +614,8 @@ class TVRage:
             return self.shows[key]
 
         key = key.lower() # make key lower case
-        sid = self._nameToSid(key)
-        log().debug('Got series id %s' % (sid))
-        return self.shows[sid]
+        sids = self._nameToSid(key)
+        return [self.shows[sid] for sid in sids]
 
     def __repr__(self):
         return str(self.shows)

@@ -29,15 +29,19 @@ import urllib
 from threading import Lock
 
 # apparently py2exe won't build these unless they're imported somewhere
-from sickbeard import providers, metadata, indexers
-from indexers import indexer_api, indexer_exceptions
-from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen
+from sickbeard import providers, metadata
+from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \
+    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator
-from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, subtitles, traktWatchListChecker
+from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
+    subtitles, traktWatchListChecker
 from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
 from sickbeard import logger
 from sickbeard import naming
 from sickbeard import scene_numbering
+from indexers.indexer_api import indexerApi
+from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
+    indexer_attributenotfound, indexer_seasonnotfound, indexer_userabort, indexerExcepts
 
 from common import SD, SKIPPED, NAMING_REPEAT
 

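The new indexer_exceptions names give callers one indexer-agnostic family of errors to catch instead of tvdb_- or tvrage-specific ones. A hedged sketch of the calling pattern this enables; the lookup call and show name are illustrative, and only the exception names imported above are taken from the diff:

    # Illustrative only: catching the indexer-neutral exceptions imported
    # above instead of per-backend tvdb_*/tvrage_* errors. The t[...] lookup
    # mirrors the Tvdb/TVRage __getitem__ usage elsewhere in this commit.
    try:
        shows = t['some show name']        # may hit the network and fail
    except indexer_shownotfound:
        shows = []                         # no match on this indexer
    except indexer_error, e:               # Python 2 except syntax, as used here
        logger.log(u"Indexer error: " + str(e).decode('utf-8'), logger.ERROR)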
@@ -410,7 +414,6 @@ DATE_PRESET = None
 TIME_PRESET = None
 TIME_PRESET_W_SECONDS = None
 
-
 USE_SUBTITLES = False
 SUBTITLES_LANGUAGES = []
 SUBTITLES_DIR = ''

@@ -434,12 +437,13 @@ TMDB_API_KEY = 'edc5f123313769de83a71e157758030b'
 
 __INITIALIZED__ = False
 
 
 def get_backlog_cycle_time():
-    cycletime = SEARCH_FREQUENCY*2+7
+    cycletime = SEARCH_FREQUENCY * 2 + 7
     return max([cycletime, 720])
 
-def initialize(consoleLogging=True):
+
+def initialize(consoleLogging=True):
     with INIT_LOCK:
 
         global ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \

@@ -470,7 +474,7 @@ def initialize(consoleLogging=True):
             showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \
             NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_STRIP_YEAR, \
             RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
-            WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList,\
+            WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \
             EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, \
             USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \
             USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, \

@@ -534,14 +538,12 @@ def initialize(consoleLogging=True):
         WEB_PASSWORD = check_setting_str(CFG, 'General', 'web_password', '')
         LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
 
-
         LOCALHOST_IP = check_setting_str(CFG, 'General', 'localhost_ip', '')
         ANON_REDIRECT = check_setting_str(CFG, 'General', 'anon_redirect', 'http://dereferer.org/?')
         # attempt to help prevent users from breaking links by using a bad url
         if not ANON_REDIRECT.endswith('?'):
             ANON_REDIRECT = ''
 
-
         UPDATE_SHOWS_ON_START = bool(check_setting_int(CFG, 'General', 'update_shows_on_start', 0))
         SORT_ARTICLE = bool(check_setting_int(CFG, 'General', 'sort_article', 0))
 

@@ -763,7 +765,8 @@ def initialize(consoleLogging=True):
         USE_TWITTER = bool(check_setting_int(CFG, 'Twitter', 'use_twitter', 0))
         TWITTER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsnatch', 0))
         TWITTER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_ondownload', 0))
-        TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Twitter', 'twitter_notify_onsubtitledownload', 0))
+        TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
+            check_setting_int(CFG, 'Twitter', 'twitter_notify_onsubtitledownload', 0))
         TWITTER_USERNAME = check_setting_str(CFG, 'Twitter', 'twitter_username', '')
         TWITTER_PASSWORD = check_setting_str(CFG, 'Twitter', 'twitter_password', '')
         TWITTER_PREFIX = check_setting_str(CFG, 'Twitter', 'twitter_prefix', 'Sick Beard')

@@ -777,13 +780,15 @@ def initialize(consoleLogging=True):
         USE_PUSHOVER = bool(check_setting_int(CFG, 'Pushover', 'use_pushover', 0))
         PUSHOVER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsnatch', 0))
         PUSHOVER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_ondownload', 0))
-        PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0))
+        PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
+            check_setting_int(CFG, 'Pushover', 'pushover_notify_onsubtitledownload', 0))
         PUSHOVER_USERKEY = check_setting_str(CFG, 'Pushover', 'pushover_userkey', '')
 
         USE_LIBNOTIFY = bool(check_setting_int(CFG, 'Libnotify', 'use_libnotify', 0))
         LIBNOTIFY_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsnatch', 0))
         LIBNOTIFY_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_ondownload', 0))
-        LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsubtitledownload', 0))
+        LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
+            check_setting_int(CFG, 'Libnotify', 'libnotify_notify_onsubtitledownload', 0))
 
         USE_NMJ = bool(check_setting_int(CFG, 'NMJ', 'use_nmj', 0))
         NMJ_HOST = check_setting_str(CFG, 'NMJ', 'nmj_host', '')

@ -798,9 +803,12 @@ def initialize(consoleLogging=True):
|
||||||
USE_SYNOINDEX = bool(check_setting_int(CFG, 'Synology', 'use_synoindex', 0))
|
USE_SYNOINDEX = bool(check_setting_int(CFG, 'Synology', 'use_synoindex', 0))
|
||||||
|
|
||||||
USE_SYNOLOGYNOTIFIER = bool(check_setting_int(CFG, 'SynologyNotifier', 'use_synologynotifier', 0))
|
USE_SYNOLOGYNOTIFIER = bool(check_setting_int(CFG, 'SynologyNotifier', 'use_synologynotifier', 0))
|
||||||
SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsnatch', 0))
|
SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = bool(
|
||||||
SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_ondownload', 0))
|
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsnatch', 0))
|
||||||
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsubtitledownload', 0))
|
SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = bool(
|
||||||
|
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_ondownload', 0))
|
||||||
|
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||||
|
check_setting_int(CFG, 'SynologyNotifier', 'synologynotifier_notify_onsubtitledownload', 0))
|
||||||
|
|
||||||
USE_TRAKT = bool(check_setting_int(CFG, 'Trakt', 'use_trakt', 0))
|
USE_TRAKT = bool(check_setting_int(CFG, 'Trakt', 'use_trakt', 0))
|
||||||
TRAKT_USERNAME = check_setting_str(CFG, 'Trakt', 'trakt_username', '')
|
TRAKT_USERNAME = check_setting_str(CFG, 'Trakt', 'trakt_username', '')
|
||||||
|
@ -831,13 +839,15 @@ def initialize(consoleLogging=True):
|
||||||
USE_PUSHALOT = bool(check_setting_int(CFG, 'Pushalot', 'use_pushalot', 0))
|
USE_PUSHALOT = bool(check_setting_int(CFG, 'Pushalot', 'use_pushalot', 0))
|
||||||
PUSHALOT_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsnatch', 0))
|
PUSHALOT_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsnatch', 0))
|
||||||
PUSHALOT_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_ondownload', 0))
|
PUSHALOT_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_ondownload', 0))
|
||||||
PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsubtitledownload', 0))
|
PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||||
|
check_setting_int(CFG, 'Pushalot', 'pushalot_notify_onsubtitledownload', 0))
|
||||||
PUSHALOT_AUTHORIZATIONTOKEN = check_setting_str(CFG, 'Pushalot', 'pushalot_authorizationtoken', '')
|
PUSHALOT_AUTHORIZATIONTOKEN = check_setting_str(CFG, 'Pushalot', 'pushalot_authorizationtoken', '')
|
||||||
|
|
||||||
USE_PUSHBULLET = bool(check_setting_int(CFG, 'Pushbullet', 'use_pushbullet', 0))
|
USE_PUSHBULLET = bool(check_setting_int(CFG, 'Pushbullet', 'use_pushbullet', 0))
|
||||||
PUSHBULLET_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsnatch', 0))
|
PUSHBULLET_NOTIFY_ONSNATCH = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsnatch', 0))
|
||||||
PUSHBULLET_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_ondownload', 0))
|
PUSHBULLET_NOTIFY_ONDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_ondownload', 0))
|
||||||
PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = bool(check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsubtitledownload', 0))
|
PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = bool(
|
||||||
|
check_setting_int(CFG, 'Pushbullet', 'pushbullet_notify_onsubtitledownload', 0))
|
||||||
PUSHBULLET_API = check_setting_str(CFG, 'Pushbullet', 'pushbullet_api', '')
|
PUSHBULLET_API = check_setting_str(CFG, 'Pushbullet', 'pushbullet_api', '')
|
||||||
PUSHBULLET_DEVICE = check_setting_str(CFG, 'Pushbullet', 'pushbullet_device', '')
|
PUSHBULLET_DEVICE = check_setting_str(CFG, 'Pushbullet', 'pushbullet_device', '')
|
||||||
|
|
||||||
|
@ -859,7 +869,9 @@ def initialize(consoleLogging=True):
|
||||||
SUBTITLES_LANGUAGES = []
|
SUBTITLES_LANGUAGES = []
|
||||||
SUBTITLES_DIR = check_setting_str(CFG, 'Subtitles', 'subtitles_dir', '')
|
SUBTITLES_DIR = check_setting_str(CFG, 'Subtitles', 'subtitles_dir', '')
|
||||||
SUBTITLES_SERVICES_LIST = check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_LIST', '').split(',')
|
SUBTITLES_SERVICES_LIST = check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_LIST', '').split(',')
|
||||||
SUBTITLES_SERVICES_ENABLED = [int(x) for x in check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_ENABLED', '').split('|') if x]
|
SUBTITLES_SERVICES_ENABLED = [int(x) for x in
|
||||||
|
check_setting_str(CFG, 'Subtitles', 'SUBTITLES_SERVICES_ENABLED', '').split('|')
|
||||||
|
if x]
|
||||||
SUBTITLES_DEFAULT = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_default', 0))
|
SUBTITLES_DEFAULT = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_default', 0))
|
||||||
SUBTITLES_HISTORY = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_history', 0))
|
SUBTITLES_HISTORY = bool(check_setting_int(CFG, 'Subtitles', 'subtitles_history', 0))
|
||||||
SUBTITLES_FINDER_FREQUENCY = check_setting_int(CFG, 'Subtitles', 'subtitles_finder_frequency', 1)
|
SUBTITLES_FINDER_FREQUENCY = check_setting_int(CFG, 'Subtitles', 'subtitles_finder_frequency', 1)
|
||||||
|
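
Every setting above funnels through the same two helpers. They are defined elsewhere in this module, not in this diff; the following is only a minimal sketch of the pattern they follow, assuming a ConfigObj-style nested dict (the write-back-on-miss behaviour is an assumption):

    def check_setting_int(config, section, item_name, def_val):
        # Fetch config[section][item_name] as an int, falling back to def_val
        # and writing the default back so the key exists on the next save.
        try:
            my_val = int(config[section][item_name])
        except (KeyError, ValueError, TypeError):
            my_val = def_val
            try:
                config[section][item_name] = my_val
            except KeyError:
                config[section] = {}
                config[section][item_name] = my_val
        return my_val

check_setting_str works the same way but skips the int() coercion, which is why booleans are stored as 0/1 ints and wrapped in bool() at the call sites above.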
@@ -873,7 +885,8 @@ def initialize(consoleLogging=True):

 CALENDAR_UNPROTECTED = bool(check_setting_int(CFG, 'General', 'calendar_unprotected', 0))

-EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'General', 'extra_scripts', '').split('|') if x.strip()]
+EXTRA_SCRIPTS = [x.strip() for x in check_setting_str(CFG, 'General', 'extra_scripts', '').split('|') if
+    x.strip()]

 USE_LISTVIEW = bool(check_setting_int(CFG, 'General', 'use_listview', 0))

@@ -895,7 +908,7 @@ def initialize(consoleLogging=True):
 COMING_EPS_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'coming_eps_missed_range', 7)
 DATE_PRESET = check_setting_str(CFG, 'GUI', 'date_preset', '%x')
 TIME_PRESET_W_SECONDS = check_setting_str(CFG, 'GUI', 'time_preset', '%I:%M:%S %p')
-TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S",u"")
+TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u":%S", u"")

 NEWZNAB_DATA = check_setting_str(CFG, 'Newznab', 'newznab_data', '')
 newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
@@ -935,7 +948,6 @@ def initialize(consoleLogging=True):
 (METADATA_WDTV, metadata.wdtv),
 (METADATA_TIVO, metadata.tivo),
 ]:
-
 (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
 tmp_provider = cur_metadata_class.metadata_class()
 tmp_provider.set_config(cur_metadata_config)
@@ -997,12 +1009,12 @@ def initialize(consoleLogging=True):
 traktWatchListCheckerSchedular.silent = True

 backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
-    cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
+    cycleTime=datetime.timedelta(
+        minutes=get_backlog_cycle_time()),
     threadName="BACKLOG",
     runImmediately=True)
 backlogSearchScheduler.action.cycleTime = BACKLOG_SEARCH_FREQUENCY

-
 subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
     cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
     threadName="FINDSUBTITLES",
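
Each scheduler pairs an action object with a cycleTime. The Scheduler class itself is outside this diff, so the following is only a sketch of the loop implied by the arguments (the poll interval and attribute names are assumptions):

    import datetime
    import threading
    import time


    class SchedulerSketch:
        def __init__(self, action, cycleTime, threadName="SCHEDULER", runImmediately=False):
            self.action = action  # object exposing run()
            self.cycleTime = cycleTime  # datetime.timedelta between runs
            self.lastRun = datetime.datetime.fromordinal(1) if runImmediately else datetime.datetime.now()
            self.abort = False
            self.thread = threading.Thread(target=self.runLoop, name=threadName)

        def runLoop(self):
            while not self.abort:
                if datetime.datetime.now() - self.lastRun > self.cycleTime:
                    self.lastRun = datetime.datetime.now()
                    self.action.run()
                time.sleep(1)

This is why backlogSearchScheduler.action.cycleTime can be adjusted after construction: the scheduler keeps a live reference to its action.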
@@ -1017,8 +1029,8 @@ def initialize(consoleLogging=True):
 __INITIALIZED__ = True
 return True

-def start():

+def start():
 global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, \
 showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
 properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
@@ -1062,8 +1074,8 @@ def start():
 started = True

-def halt ():

+def halt():
 global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \
 showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
 subtitlesFinderScheduler, started, \
@@ -1147,7 +1159,6 @@ def halt ():
 except:
 pass

-
 __INITIALIZED__ = False


@@ -1158,7 +1169,6 @@ def sig_handler(signum=None, frame=None):

-
 def saveAll():

 global showList

 # write all shows
@@ -1172,7 +1182,6 @@ def saveAll():

-
 def saveAndShutdown(restart=False):

 halt()

 saveAll()
@@ -1197,7 +1206,8 @@ def saveAndShutdown(restart=False):
 popen_list = [os.path.join(PROG_DIR, 'updater.exe'), str(PID), sys.executable]
 else:
 logger.log(u"Unknown SB launch method, please file a bug report about this", logger.ERROR)
-popen_list = [sys.executable, os.path.join(PROG_DIR, 'updater.py'), str(PID), sys.executable, MY_FULLNAME ]
+popen_list = [sys.executable, os.path.join(PROG_DIR, 'updater.py'), str(PID), sys.executable,
+    MY_FULLNAME]

 if popen_list:
 popen_list += MY_ARGS
@@ -1212,20 +1222,24 @@ def saveAndShutdown(restart=False):

 def invoke_command(to_call, *args, **kwargs):
 global invoked_command

 def delegate():
 to_call(*args, **kwargs)

 invoked_command = delegate
-logger.log(u"Placed invoked command: "+repr(invoked_command)+" for "+repr(to_call)+" with "+repr(args)+" and "+repr(kwargs), logger.DEBUG)
+logger.log(u"Placed invoked command: " + repr(invoked_command) + " for " + repr(to_call) + " with " + repr(
+    args) + " and " + repr(kwargs), logger.DEBUG)

+
 def invoke_restart(soft=True):
 invoke_command(restart, soft=soft)

+
 def invoke_shutdown():
 invoke_command(saveAndShutdown)

+
 def restart(soft=True):

 if soft:
 halt()
 saveAll()
@@ -1238,9 +1252,7 @@ def restart(soft=True):
 saveAndShutdown(restart=True)

-
-
 def save_config():

 new_config = ConfigObj()
 new_config.filename = CONFIG_FILE

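
invoke_command exists so other threads can queue work for the main loop: it freezes a call and its arguments into a zero-argument closure. A standalone sketch of that pattern (main_loop_tick is an illustrative name, not part of the source):

    invoked_command = None


    def invoke_command(to_call, *args, **kwargs):
        global invoked_command

        def delegate():
            to_call(*args, **kwargs)

        invoked_command = delegate  # picked up later by the owning loop


    def main_loop_tick():
        # hypothetical consumer: run and clear any pending command
        global invoked_command
        if invoked_command:
            invoked_command()
            invoked_command = None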
@@ -1519,7 +1531,8 @@ def save_config():
 new_config['SynologyNotifier']['use_synologynotifier'] = int(USE_SYNOLOGYNOTIFIER)
 new_config['SynologyNotifier']['synologynotifier_notify_onsnatch'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH)
 new_config['SynologyNotifier']['synologynotifier_notify_ondownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD)
-new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD)
+new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int(
+    SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD)

 new_config['Trakt'] = {}
 new_config['Trakt']['use_trakt'] = int(USE_TRAKT)
@@ -1564,7 +1577,6 @@ def save_config():
 new_config['Pushbullet']['pushbullet_api'] = PUSHBULLET_API
 new_config['Pushbullet']['pushbullet_device'] = PUSHBULLET_DEVICE

-
 new_config['Email'] = {}
 new_config['Email']['use_email'] = int(USE_EMAIL)
 new_config['Email']['email_notify_onsnatch'] = int(EMAIL_NOTIFY_ONSNATCH)
@@ -1606,7 +1618,7 @@ def save_config():
 new_config['Subtitles']['subtitles_history'] = int(SUBTITLES_HISTORY)
 new_config['Subtitles']['subtitles_finder_frequency'] = int(SUBTITLES_FINDER_FREQUENCY)

-new_config['FailedDownloads']= {}
+new_config['FailedDownloads'] = {}
 new_config['FailedDownloads']['use_failed_downloads'] = int(USE_FAILED_DOWNLOADS)
 new_config['FailedDownloads']['delete_failed'] = int(DELETE_FAILED)

@@ -1628,12 +1640,12 @@ def launchBrowser(startPort=None):
 except:
 logger.log(u"Unable to launch a browser", logger.ERROR)

-def getEpList(epIDs, showid=None):

+def getEpList(epIDs, showid=None):
 if epIDs == None or len(epIDs) == 0:
 return []

-query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?']*len(epIDs)),)
+query = "SELECT * FROM tv_episodes WHERE indexerid in (%s)" % (",".join(['?'] * len(epIDs)),)
 params = epIDs

 if showid != None:
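
getEpList builds its IN clause by joining one '?' placeholder per id, which keeps the query parameterized instead of interpolating values into SQL. A sketch of the same technique against sqlite3 (table and column names mirror the query above; the connection handling is an assumption):

    import sqlite3


    def fetch_episodes(connection, ep_ids, showid=None):
        query = "SELECT * FROM tv_episodes WHERE indexerid IN (%s)" % ",".join(["?"] * len(ep_ids))
        params = list(ep_ids)
        if showid is not None:
            query += " AND showid = ?"
            params.append(showid)
        return connection.execute(query, params).fetchall()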
@@ -24,18 +24,21 @@ from sickbeard import logger
 from sickbeard import encodingKludge as ek
 from sickbeard import processTV

-class PostProcesser():

+class PostProcesser():
 def run(self):
 if not sickbeard.PROCESS_AUTOMATICALLY:
 return

 if not ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
-logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist", logger.ERROR)
+logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist",
+    logger.ERROR)
 return

 if not ek.ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR):
-logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)", logger.ERROR)
+logger.log(
+    u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)",
+    logger.ERROR)
 return

 processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)
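
The ek.ek(...) wrapper (encodingKludge) shields filesystem calls from Python 2 unicode/bytes mismatches; its implementation is not in this diff, so this is only a sketch of the idea:

    import sys


    def ek(func, *args, **kwargs):
        # Encode unicode arguments to the filesystem encoding before the call,
        # and decode a str result back to unicode (Python 2 semantics).
        encoding = sys.getfilesystemencoding()
        fixed_args = [x.encode(encoding) if isinstance(x, unicode) else x for x in args]
        result = func(*fixed_args, **kwargs)
        if isinstance(result, str):
            result = result.decode(encoding)
        return result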
@@ -79,26 +79,27 @@ def foldersAtPath(path, includeParent=False):
 if path == parentPath and os.name == 'nt':
 parentPath = ""

-fileList = [{ 'name': filename, 'path': ek.ek(os.path.join, path, filename) } for filename in ek.ek(os.listdir, path)]
+fileList = [{'name': filename, 'path': ek.ek(os.path.join, path, filename)} for filename in ek.ek(os.listdir, path)]
 fileList = filter(lambda entry: ek.ek(os.path.isdir, entry['path']), fileList)

 # prune out directories to proect the user from doing stupid things (already lower case the dir to reduce calls)
-hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler", "system volume information", "temporary internet files"] # windows specific
+hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler",
+    "system volume information", "temporary internet files"]  # windows specific
 hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"]  # osx specific
 fileList = filter(lambda entry: entry['name'].lower() not in hideList, fileList)

-fileList = sorted(fileList, lambda x, y: cmp(os.path.basename(x['name']).lower(), os.path.basename(y['path']).lower()))
+fileList = sorted(fileList,
+    lambda x, y: cmp(os.path.basename(x['name']).lower(), os.path.basename(y['path']).lower()))

 entries = [{'current_path': path}]
 if includeParent and parentPath != path:
-entries.append({ 'name': "..", 'path': parentPath })
+entries.append({'name': "..", 'path': parentPath})
 entries.extend(fileList)

 return entries


 class WebFileBrowser:

 @cherrypy.expose
 def index(self, path=''):
 cherrypy.response.headers['Content-Type'] = "application/json"
@@ -108,4 +109,4 @@ class WebFileBrowser:
 def complete(self, term):
 cherrypy.response.headers['Content-Type'] = "application/json"
 paths = [entry['path'] for entry in foldersAtPath(os.path.dirname(term)) if 'path' in entry]
-return json.dumps( paths )
+return json.dumps(paths)
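
A short usage sketch of the contract these handlers expose: foldersAtPath returns the current path entry followed by child folders, and the cherrypy endpoints serialise that list as JSON (the sample output below is illustrative):

    import json
    import os


    def browse(path):
        # mirrors WebFileBrowser.index: normalise, list, serialise
        entries = foldersAtPath(os.path.normpath(path), includeParent=True)
        return json.dumps(entries)

    # browse('/home/user') ->
    # '[{"current_path": "/home/user"}, {"name": "..", "path": "/home"}, ...]'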
@@ -25,9 +25,11 @@ import datetime

 from common import USER_AGENT, Quality

+
 class SickBeardURLopener(urllib.FancyURLopener):
 version = USER_AGENT

+
 class AuthURLOpener(SickBeardURLopener):
 """
 URLOpener class that supports http auth without needing interactive password entry.
@@ -36,6 +38,7 @@ class AuthURLOpener(SickBeardURLopener):
 user: username to use for HTTP auth
 pw: password to use for HTTP auth
 """
+
 def __init__(self, user, pw):
 self.username = user
 self.password = pw
@@ -66,6 +69,7 @@ class AuthURLOpener(SickBeardURLopener):
 self.numTries = 0
 return SickBeardURLopener.open(self, url)

+
 class SearchResult:
 """
 Represents a search result from an indexer.
@@ -112,18 +116,21 @@ class SearchResult:
 def fileName(self):
 return self.episodes[0].prettyName() + "." + self.resultType

+
 class NZBSearchResult(SearchResult):
 """
 Regular NZB result with an URL to the NZB
 """
 resultType = "nzb"

+
 class NZBDataSearchResult(SearchResult):
 """
 NZB result where the actual NZB XML data is stored in the extraInfo
 """
 resultType = "nzbdata"

+
 class TorrentSearchResult(SearchResult):
 """
 Torrent result with an URL to the torrent
@@ -131,17 +138,36 @@ class TorrentSearchResult(SearchResult):
 resultType = "torrent"

+
+class AllShowsListUI:
+    """
+    This class is for tvdb-api. Instead of prompting with a UI to pick the
+    desired result out of a list of shows it tries to be smart about it
+    based on what shows are in SB.
+    """
+
+    def __init__(self, config, log=None):
+        self.config = config
+        self.log = log
+
+    def selectSeries(self, allSeries):
+        # get all available shows
+        if allSeries:
+            return allSeries
+
+
 class ShowListUI:
 """
 This class is for tvdb-api. Instead of prompting with a UI to pick the
 desired result out of a list of shows it tries to be smart about it
 based on what shows are in SB.
 """

 def __init__(self, config, log=None):
 self.config = config
 self.log = log

 def selectSeries(self, allSeries):
+    if sickbeard.showList:
 idList = [x.indexerid for x in sickbeard.showList]

 # try to pick a show that's in my show list
|
||||||
if int(curShow['id']) in idList:
|
if int(curShow['id']) in idList:
|
||||||
return curShow
|
return curShow
|
||||||
|
|
||||||
# if nothing matches then just go with the first match I guess
|
# if nothing matches then return everything
|
||||||
return allSeries[0]
|
return allSeries[0]
|
||||||
|
|
||||||
|
|
||||||
class Proper:
|
class Proper:
|
||||||
def __init__(self, name, url, date):
|
def __init__(self, name, url, date):
|
||||||
self.name = name
|
self.name = name
|
||||||
|
@ -166,7 +193,8 @@ class Proper:
|
||||||
self.episode = -1
|
self.episode = -1
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return str(self.date)+" "+self.name+" "+str(self.season)+"x"+str(self.episode)+" of "+str(self.indexerid+" from "+self.indexer)
|
return str(self.date) + " " + self.name + " " + str(self.season) + "x" + str(self.episode) + " of " + str(
|
||||||
|
self.indexerid) + " from " + str(sickbeard.indexerApi(self.indexer).name)
|
||||||
|
|
||||||
|
|
||||||
class ErrorViewer():
|
class ErrorViewer():
|
||||||
|
@ -188,10 +216,12 @@ class ErrorViewer():
|
||||||
def clear():
|
def clear():
|
||||||
ErrorViewer.errors = []
|
ErrorViewer.errors = []
|
||||||
|
|
||||||
|
|
||||||
class UIError():
|
class UIError():
|
||||||
"""
|
"""
|
||||||
Represents an error to be displayed in the web UI.
|
Represents an error to be displayed in the web UI.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, message):
|
def __init__(self, message):
|
||||||
self.message = message
|
self.message = message
|
||||||
self.time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
self.time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||||
|
|
|
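
ErrorViewer holds a class-level list the web UI drains. A usage sketch (an add() helper is assumed to exist alongside clear(), appending to ErrorViewer.errors):

    ErrorViewer.add(UIError(u"Unable to contact indexer"))  # add() assumed
    for error in ErrorViewer.errors:
        print error.time, error.message
    ErrorViewer.clear()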
@@ -21,7 +21,7 @@ __all__ = ['utorrent',
 'deluge',
 'download_station',
 'rtorrent'
 ]

 import sickbeard

@@ -62,22 +62,22 @@ http_error_code = {
 505: 'HTTP Version Not Supported',
 }

-default_host = {'utorrent':'http://localhost:8000',
-    'transmission' :'http://localhost:9091',
-    'deluge':'http://localhost:8112',
+default_host = {'utorrent': 'http://localhost:8000',
+    'transmission': 'http://localhost:9091',
+    'deluge': 'http://localhost:8112',
     'download_station': 'http://localhost:5000',
     'rtorrent': 'scgi://localhost:5000',
     }


 def getClientModule(name):

 name = name.lower()
 prefix = "sickbeard.clients."

-return __import__(prefix+name, fromlist=__all__)
+return __import__(prefix + name, fromlist=__all__)


 def getClientIstance(name):

 module = getClientModule(name)
 className = module.api.__class__.__name__
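
getClientIstance resolves the configured client at runtime: import the module by name, then look up the class of its module-level api singleton. A hedged usage sketch (sendTORRENT is the entry point GenericClient is assumed to expose; result is a TorrentSearchResult from a search):

    client_class = getClientIstance(sickbeard.TORRENT_METHOD)  # e.g. 'deluge'
    client = client_class(sickbeard.TORRENT_HOST, sickbeard.TORRENT_USERNAME, sickbeard.TORRENT_PASSWORD)
    client.sendTORRENT(result)  # hands the snatched torrent to the client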
@@ -23,8 +23,8 @@ import sickbeard
 from sickbeard import logger
 from sickbeard.clients.generic import GenericClient

-class DelugeAPI(GenericClient):

+class DelugeAPI(GenericClient):
 def __init__(self, host=None, username=None, password=None):

 super(DelugeAPI, self).__init__('Deluge', host, username, password)
@@ -44,7 +44,6 @@ class DelugeAPI(GenericClient):
 self.auth = self.response.json()["result"]

-
 post_data = json.dumps({"method": "web.connected",
     "params": [],
     "id": 10
@@ -79,7 +78,6 @@ class DelugeAPI(GenericClient):
 except:
 return None

-
 post_data = json.dumps({"method": "web.connected",
     "params": [],
     "id": 10
@@ -99,7 +97,8 @@ class DelugeAPI(GenericClient):
 def _add_torrent_uri(self, result):

 post_data = json.dumps({"method": "core.add_torrent_magnet",
-    "params": [result.url,{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
+    "params": [result.url, {"move_completed": "true",
+        "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
     "id": 2
     })
 self._request(method='post', data=post_data)
@@ -111,7 +110,9 @@ class DelugeAPI(GenericClient):
 def _add_torrent_file(self, result):

 post_data = json.dumps({"method": "core.add_torrent_file",
-    "params": [result.name + '.torrent', b64encode(result.content),{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
+    "params": [result.name + '.torrent', b64encode(result.content),
+        {"move_completed": "true",
+        "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
     "id": 2
     })
 self._request(method='post', data=post_data)
@@ -134,21 +135,22 @@ class DelugeAPI(GenericClient):

 if labels != None:
 if label not in labels:
-logger.log(self.name + ': ' + label +u" label does not exist in Deluge we must add it", logger.DEBUG)
+logger.log(self.name + ': ' + label + u" label does not exist in Deluge we must add it",
+    logger.DEBUG)
 post_data = json.dumps({"method": 'label.add',
     "params": [label],
     "id": 4
     })
 self._request(method='post', data=post_data)
-logger.log(self.name + ': ' + label +u" label added to Deluge", logger.DEBUG)
+logger.log(self.name + ': ' + label + u" label added to Deluge", logger.DEBUG)

 # add label to torrent
-post_data = json.dumps({ "method": 'label.set_torrent',
+post_data = json.dumps({"method": 'label.set_torrent',
     "params": [result.hash, label],
     "id": 5
     })
 self._request(method='post', data=post_data)
-logger.log(self.name + ': ' + label +u" label added to torrent", logger.DEBUG)
+logger.log(self.name + ': ' + label + u" label added to torrent", logger.DEBUG)
 else:
 logger.log(self.name + ': ' + u"label plugin not detected", logger.DEBUG)
 return False
@@ -166,7 +168,7 @@ class DelugeAPI(GenericClient):
 self._request(method='post', data=post_data)

 post_data = json.dumps({"method": "core.set_torrent_stop_ratio",
-    "params": [result.hash,float(sickbeard.TORRENT_RATIO)],
+    "params": [result.hash, float(sickbeard.TORRENT_RATIO)],
     "id": 6
     })
 self._request(method='post', data=post_data)
@@ -207,4 +209,5 @@ class DelugeAPI(GenericClient):

 return True

+
 api = DelugeAPI()
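
Every Deluge call above is one POST to the web UI's JSON-RPC endpoint carrying a method, params, and a request id; auth.login establishes a session cookie first. A hedged sketch of the raw exchange (the port and /json path match the default_host table above; the password is a placeholder):

    import json

    from lib import requests

    session = requests.session()
    url = 'http://localhost:8112/json'

    login = json.dumps({"method": "auth.login", "params": ["deluge"], "id": 1})
    session.post(url, data=login, verify=False)  # sets the session cookie

    payload = json.dumps({"method": "web.connected", "params": [], "id": 10})
    response = session.post(url, data=payload, verify=False)
    print response.json()["result"]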
@@ -23,8 +23,8 @@
 import sickbeard
 from sickbeard.clients.generic import GenericClient

-class DownloadStationAPI(GenericClient):

+class DownloadStationAPI(GenericClient):
 def __init__(self, host=None, username=None, password=None):

 super(DownloadStationAPI, self).__init__('DownloadStation', host, username, password)
@@ -45,11 +45,11 @@ class DownloadStationAPI(GenericClient):

 def _add_torrent_uri(self, result):

-data = {'api':'SYNO.DownloadStation.Task',
-    'version':'1', 'method':'create',
-    'session':'DownloadStation',
-    '_sid':self.auth,
-    'uri':result.url
+data = {'api': 'SYNO.DownloadStation.Task',
+    'version': '1', 'method': 'create',
+    'session': 'DownloadStation',
+    '_sid': self.auth,
+    'uri': result.url
     }
 self._request(method='post', data=data)

@@ -57,15 +57,16 @@ class DownloadStationAPI(GenericClient):

 def _add_torrent_file(self, result):

-data = {'api':'SYNO.DownloadStation.Task',
-    'version':'1',
-    'method':'create',
-    'session':'DownloadStation',
-    '_sid':self.auth
+data = {'api': 'SYNO.DownloadStation.Task',
+    'version': '1',
+    'method': 'create',
+    'session': 'DownloadStation',
+    '_sid': self.auth
     }
-files = {'file':(result.name + '.torrent', result.content)}
+files = {'file': (result.name + '.torrent', result.content)}
 self._request(method='post', data=data, files=files)

 return self.response.json()['success']

+
 api = DownloadStationAPI()

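
Download Station follows Synology's Web API: authenticate once for a _sid, then pass api/version/method plus the sid on every call. A hedged sketch of the task-creation flow (the auth.cgi/task.cgi paths follow Synology's published API; credentials and the URI are placeholders):

    from lib import requests

    host = 'http://localhost:5000'

    auth = requests.get(host + '/webapi/auth.cgi', params={
        'api': 'SYNO.API.Auth', 'version': '2', 'method': 'login',
        'account': 'admin', 'passwd': 'secret',
        'session': 'DownloadStation', 'format': 'sid'}).json()
    sid = auth['data']['sid']

    task = requests.post(host + '/webapi/DownloadStation/task.cgi', data={
        'api': 'SYNO.DownloadStation.Task', 'version': '1', 'method': 'create',
        'session': 'DownloadStation', '_sid': sid,
        'uri': 'magnet:?xt=urn:btih:...'}).json()
    print task['success']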
@@ -10,8 +10,8 @@ from sickbeard.clients import http_error_code
 from lib.bencode import bencode, bdecode
 from lib import requests

-class GenericClient(object):

+class GenericClient(object):
 def __init__(self, name, host=None, username=None, password=None):

 self.name = name
@@ -32,14 +32,18 @@ class GenericClient(object):
 self.last_time = time.time()
 self._get_auth()

-logger.log(self.name + u': Requested a ' + method.upper() + ' connection to url '+ self.url + ' with Params= ' + str(params) + ' Data=' + str(data if data else 'None')[0:99] + ('...' if len(data if data else 'None') > 200 else ''), logger.DEBUG)
+logger.log(
+    self.name + u': Requested a ' + method.upper() + ' connection to url ' + self.url + ' with Params= ' + str(
+    params) + ' Data=' + str(data if data else 'None')[0:99] + (
+    '...' if len(data if data else 'None') > 200 else ''), logger.DEBUG)

 if not self.auth:
-logger.log(self.name + u': Authentication Failed' , logger.ERROR)
+logger.log(self.name + u': Authentication Failed', logger.ERROR)
 return False

 try:
-self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files, timeout=10, verify=False)
+self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
+    timeout=10, verify=False)
 except requests.exceptions.ConnectionError, e:
 logger.log(self.name + u': Unable to connect ' + ex(e), logger.ERROR)
 return False
@@ -53,7 +57,8 @@ class GenericClient(object):
 logger.log(self.name + u': Connection Timeout ' + ex(e), logger.ERROR)
 return False
 except Exception, e:
-logger.log(self.name + u': Unknown exception raised when send torrent to ' + self.name + ': ' + ex(e), logger.ERROR)
+logger.log(self.name + u': Unknown exception raised when send torrent to ' + self.name + ': ' + ex(e),
+    logger.ERROR)
 return False

 if self.response.status_code == 401:
@@ -64,7 +69,7 @@ class GenericClient(object):
 logger.log(self.name + u': ' + http_error_code[self.response.status_code], logger.DEBUG)
 return False

-logger.log(self.name + u': Response to '+ method.upper() + ' request is ' + self.response.text, logger.DEBUG)
+logger.log(self.name + u': Response to ' + method.upper() + ' request is ' + self.response.text, logger.DEBUG)

 return True

@@ -142,7 +147,7 @@ class GenericClient(object):
 logger.log(u'Calling ' + self.name + ' Client', logger.DEBUG)

 if not self._get_auth():
-logger.log(self.name + u': Authentication Failed' , logger.ERROR)
+logger.log(self.name + u': Authentication Failed', logger.ERROR)
 return r_code

 try:
@@ -186,7 +191,7 @@ class GenericClient(object):
 except requests.exceptions.ConnectionError, e:
 return False, 'Error: ' + self.name + ' Connection Error'
 except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
-return False,'Error: Invalid ' + self.name + ' host'
+return False, 'Error: Invalid ' + self.name + ' host'

 if self.response.status_code == 401:
 return False, 'Error: Invalid ' + self.name + ' Username or Password, check your config!'
@@ -198,4 +203,4 @@ class GenericClient(object):
 else:
 return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!'
 except Exception:
-return False, 'Error: Unable to connect to '+ self.name
+return False, 'Error: Unable to connect to ' + self.name
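
The session.__getattribute__(method) call is dynamic verb dispatch on the requests session; the conventional spelling is getattr. A sketch of the equivalent:

    from lib import requests

    session = requests.session()


    def do_request(method, url, **kwargs):
        # getattr(session, 'get') / getattr(session, 'post') chosen at runtime,
        # same effect as session.__getattribute__(method) above
        return getattr(session, method)(url, timeout=10, verify=False, **kwargs)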
@@ -23,8 +23,8 @@ from sickbeard.clients.generic import GenericClient
 from lib.rtorrent import RTorrent
 from lib.rtorrent.err import MethodError

-class rTorrentAPI(GenericClient):

+class rTorrentAPI(GenericClient):
 def __init__(self, host=None, username=None, password=None):
 super(rTorrentAPI, self).__init__('rTorrent', host, username, password)

@@ -187,6 +187,7 @@ class rTorrentAPI(GenericClient):
 else:
 return False, 'Error: Unable to get ' + self.name + ' Authentication, check your config!'
 except Exception:
-return False, 'Error: Unable to connect to '+ self.name
+return False, 'Error: Unable to connect to ' + self.name

+
 api = rTorrentAPI()
@@ -23,8 +23,8 @@ from base64 import b64encode
 import sickbeard
 from sickbeard.clients.generic import GenericClient

-class TransmissionAPI(GenericClient):

+class TransmissionAPI(GenericClient):
 def __init__(self, host=None, username=None, password=None):

 super(TransmissionAPI, self).__init__('Transmission', host, username, password)
@@ -33,7 +33,7 @@ class TransmissionAPI(GenericClient):

 def _get_auth(self):

-post_data = json.dumps({'method': 'session-get',})
+post_data = json.dumps({'method': 'session-get', })

 try:
 self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
@@ -53,11 +53,11 @@ class TransmissionAPI(GenericClient):

 def _add_torrent_uri(self, result):

-arguments = { 'filename': result.url,
+arguments = {'filename': result.url,
     'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
     'download-dir': sickbeard.TORRENT_PATH
     }
-post_data = json.dumps({ 'arguments': arguments,
+post_data = json.dumps({'arguments': arguments,
     'method': 'torrent-add',
     })
 self._request(method='post', data=post_data)
@@ -66,7 +66,7 @@ class TransmissionAPI(GenericClient):

 def _add_torrent_file(self, result):

-arguments = { 'metainfo': b64encode(result.content),
+arguments = {'metainfo': b64encode(result.content),
     'paused': 1 if sickbeard.TORRENT_PAUSED else 0,
     'download-dir': sickbeard.TORRENT_PATH
     }
@@ -92,7 +92,7 @@ class TransmissionAPI(GenericClient):
 ratio = float(sickbeard.TORRENT_RATIO)
 mode = 1  # Stop seeding at seedRatioLimit

-arguments = { 'ids': [torrent_id],
+arguments = {'ids': [torrent_id],
     'seedRatioLimit': ratio,
     'seedRatioMode': mode
     }
@@ -107,7 +107,7 @@ class TransmissionAPI(GenericClient):

 torrent_id = self._get_torrent_hash(result)

-arguments = { 'ids': [torrent_id]}
+arguments = {'ids': [torrent_id]}

 if result.priority == -1:
 arguments['priority-low'] = []
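
Transmission's RPC takes one JSON document per POST (a method plus an arguments object) and requires an X-Transmission-Session-Id header that the server issues via a 409 response. A hedged sketch of the torrent-add exchange (port and path match the default_host table earlier; the magnet link is a placeholder):

    import json

    from lib import requests

    url = 'http://localhost:9091/transmission/rpc'
    session = requests.session()

    body = json.dumps({'method': 'torrent-add',
                       'arguments': {'filename': 'magnet:?xt=urn:btih:...',
                                     'paused': 0}})

    response = session.post(url, data=body)
    if response.status_code == 409:
        # first call returns the CSRF token; resend with it attached
        session.headers.update({'X-Transmission-Session-Id': response.headers['X-Transmission-Session-Id']})
        response = session.post(url, data=body)
    print response.json()['result']  # 'success' when accepted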
@@ -21,8 +21,8 @@ import re
 import sickbeard
 from sickbeard.clients.generic import GenericClient

-class uTorrentAPI(GenericClient):

+class uTorrentAPI(GenericClient):
 def __init__(self, host=None, username=None, password=None):

 super(uTorrentAPI, self).__init__('uTorrent', host, username, password)
@@ -31,7 +31,7 @@ class uTorrentAPI(GenericClient):

 def _request(self, method='get', params={}, files=None):

-params.update({'token':self.auth})
+params.update({'token': self.auth})
 return super(uTorrentAPI, self)._request(method=method, params=params, files=files)

 def _get_auth(self):
@@ -46,31 +46,32 @@ class uTorrentAPI(GenericClient):

 def _add_torrent_uri(self, result):

-params={'action':'add-url', 's': result.url}
+params = {'action': 'add-url', 's': result.url}
 return self._request(params=params)

 def _add_torrent_file(self, result):

-params = {'action':'add-file'}
-files={'torrent_file': (result.name + '.torrent', result.content)}
+params = {'action': 'add-file'}
+files = {'torrent_file': (result.name + '.torrent', result.content)}
 return self._request(method='post', params=params, files=files)

 def _set_torrent_label(self, result):

-params = {'action':'setprops',
-    'hash':result.hash,
-    's':'label',
-    'v':sickbeard.TORRENT_LABEL
+params = {'action': 'setprops',
+    'hash': result.hash,
+    's': 'label',
+    'v': sickbeard.TORRENT_LABEL
     }
 return self._request(params=params)

 def _set_torrent_pause(self, result):

 if sickbeard.TORRENT_PAUSED:
-params = {'action':'pause', 'hash':result.hash}
+params = {'action': 'pause', 'hash': result.hash}
 else:
-params = {'action':'start', 'hash':result.hash}
+params = {'action': 'start', 'hash': result.hash}

 return self._request(params=params)

+
 api = uTorrentAPI()
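
uTorrent's WebUI refuses action calls without a token scraped from /gui/token.html, which is why _request injects params['token'] on every call. A hedged sketch of that handshake (credentials are placeholders; the token markup matches uTorrent's documented WebUI page, but the regex is an assumption):

    import re

    from lib import requests

    base = 'http://localhost:8000/gui/'
    session = requests.session()
    session.auth = ('admin', 'secret')

    html = session.get(base + 'token.html').text
    token = re.search(r"<div[^>]*id='token'[^>]*>([^<]+)</div>", html).group(1)

    response = session.get(base, params={'token': token,
                                         'action': 'add-url',
                                         's': 'magnet:?xt=urn:btih:...'})
    print response.status_code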
@@ -80,13 +80,6 @@ multiEpStrings[NAMING_EXTEND] = "Extend"
 multiEpStrings[NAMING_LIMITED_EXTEND] = "Extend (Limited)"
 multiEpStrings[NAMING_LIMITED_EXTEND_E_PREFIXED] = "Extend (Limited, E-prefixed)"

-### Notification Types
-INDEXER_TVDB = "Tvdb"
-INDEXER_TVRAGE = "TVRage"
-
-indexerStrings = {}
-indexerStrings[INDEXER_TVDB] = "TheTVDB"
-indexerStrings[INDEXER_TVRAGE] = "TVRage"

 class Quality:
 NONE = 0  # 0
@@ -125,7 +118,8 @@ class Quality:
 def _getStatusStrings(status):
 toReturn = {}
 for x in Quality.qualityStrings.keys():
-toReturn[Quality.compositeStatus(status, x)] = Quality.statusPrefixes[status] + " (" + Quality.qualityStrings[x] + ")"
+toReturn[Quality.compositeStatus(status, x)] = Quality.statusPrefixes[status] + " (" + \
+    Quality.qualityStrings[x] + ")"
 return toReturn

 @staticmethod
|
||||||
if x == Quality.NONE: #Last chance
|
if x == Quality.NONE: #Last chance
|
||||||
return Quality.sceneQuality(name)
|
return Quality.sceneQuality(name)
|
||||||
|
|
||||||
regex = '\W' + Quality.qualityStrings[x].replace(' ','\W') + '\W'
|
regex = '\W' + Quality.qualityStrings[x].replace(' ', '\W') + '\W'
|
||||||
regex_match = re.search(regex, name, re.I)
|
regex_match = re.search(regex, name, re.I)
|
||||||
if regex_match:
|
if regex_match:
|
||||||
return x
|
return x
|
||||||
|
@ -187,7 +181,8 @@ class Quality:
|
||||||
return Quality.SDTV
|
return Quality.SDTV
|
||||||
elif checkName(["(dvdrip|b[r|d]rip)(.ws)?.(xvid|divx|x264)"], any) and not checkName(["(720|1080)[pi]"], all):
|
elif checkName(["(dvdrip|b[r|d]rip)(.ws)?.(xvid|divx|x264)"], any) and not checkName(["(720|1080)[pi]"], all):
|
||||||
return Quality.SDDVD
|
return Quality.SDDVD
|
||||||
elif checkName(["720p", "hdtv", "x264"], all) or checkName(["hr.ws.pdtv.x264"], any) and not checkName(["(1080)[pi]"], all):
|
elif checkName(["720p", "hdtv", "x264"], all) or checkName(["hr.ws.pdtv.x264"], any) and not checkName(
|
||||||
|
["(1080)[pi]"], all):
|
||||||
return Quality.HDTV
|
return Quality.HDTV
|
||||||
elif checkName(["720p|1080i", "hdtv", "mpeg-?2"], all) or checkName(["1080i.hdtv", "h.?264"], all):
|
elif checkName(["720p|1080i", "hdtv", "mpeg-?2"], all) or checkName(["1080i.hdtv", "h.?264"], all):
|
||||||
return Quality.RAWHDTV
|
return Quality.RAWHDTV
|
||||||
|
@ -212,8 +207,8 @@ class Quality:
|
||||||
def assumeQuality(name):
|
def assumeQuality(name):
|
||||||
if name.lower().endswith((".avi", ".mp4")):
|
if name.lower().endswith((".avi", ".mp4")):
|
||||||
return Quality.SDTV
|
return Quality.SDTV
|
||||||
# elif name.lower().endswith(".mkv"):
|
# elif name.lower().endswith(".mkv"):
|
||||||
# return Quality.HDTV
|
# return Quality.HDTV
|
||||||
elif name.lower().endswith(".ts"):
|
elif name.lower().endswith(".ts"):
|
||||||
return Quality.RAWHDTV
|
return Quality.RAWHDTV
|
||||||
else:
|
else:
|
||||||
|
@ -252,6 +247,7 @@ class Quality:
|
||||||
FAILED = None
|
FAILED = None
|
||||||
SNATCHED_BEST = None
|
SNATCHED_BEST = None
|
||||||
|
|
||||||
|
|
||||||
Quality.DOWNLOADED = [Quality.compositeStatus(DOWNLOADED, x) for x in Quality.qualityStrings.keys()]
|
Quality.DOWNLOADED = [Quality.compositeStatus(DOWNLOADED, x) for x in Quality.qualityStrings.keys()]
|
||||||
Quality.SNATCHED = [Quality.compositeStatus(SNATCHED, x) for x in Quality.qualityStrings.keys()]
|
Quality.SNATCHED = [Quality.compositeStatus(SNATCHED, x) for x in Quality.qualityStrings.keys()]
|
||||||
Quality.SNATCHED_PROPER = [Quality.compositeStatus(SNATCHED_PROPER, x) for x in Quality.qualityStrings.keys()]
|
Quality.SNATCHED_PROPER = [Quality.compositeStatus(SNATCHED_PROPER, x) for x in Quality.qualityStrings.keys()]
|
||||||
|
@ -259,10 +255,14 @@ Quality.FAILED = [Quality.compositeStatus(FAILED, x) for x in Quality.qualityStr
|
||||||
Quality.SNATCHED_BEST = [Quality.compositeStatus(SNATCHED_BEST, x) for x in Quality.qualityStrings.keys()]
|
Quality.SNATCHED_BEST = [Quality.compositeStatus(SNATCHED_BEST, x) for x in Quality.qualityStrings.keys()]
|
||||||
|
|
||||||
SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], [])
|
SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], [])
|
||||||
HD = Quality.combineQualities([Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY], []) # HD720p + HD1080p
|
HD = Quality.combineQualities(
|
||||||
|
[Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY],
|
||||||
|
[]) # HD720p + HD1080p
|
||||||
HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
|
HD720p = Quality.combineQualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
|
||||||
HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
|
HD1080p = Quality.combineQualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
|
||||||
ANY = Quality.combineQualities([Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD
|
ANY = Quality.combineQualities(
|
||||||
|
[Quality.SDTV, Quality.SDDVD, Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
|
||||||
|
Quality.HDBLURAY, Quality.FULLHDBLURAY, Quality.UNKNOWN], []) # SD + HD
|
||||||
|
|
||||||
# legacy template, cant remove due to reference in mainDB upgrade?
|
# legacy template, cant remove due to reference in mainDB upgrade?
|
||||||
BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
|
BEST = Quality.combineQualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
|
||||||
|
@ -274,6 +274,7 @@ qualityPresetStrings = {SD: "SD",
|
||||||
HD1080p: "HD1080p",
|
HD1080p: "HD1080p",
|
||||||
ANY: "Any"}
|
ANY: "Any"}
|
||||||
|
|
||||||
|
|
||||||
class StatusStrings:
|
class StatusStrings:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.statusStrings = {UNKNOWN: "Unknown",
|
self.statusStrings = {UNKNOWN: "Unknown",
|
||||||
|
@ -302,8 +303,10 @@ class StatusStrings:
|
||||||
def has_key(self, name):
|
def has_key(self, name):
|
||||||
return name in self.statusStrings or name in Quality.DOWNLOADED or name in Quality.SNATCHED or name in Quality.SNATCHED_PROPER or name in Quality.SNATCHED_BEST
|
return name in self.statusStrings or name in Quality.DOWNLOADED or name in Quality.SNATCHED or name in Quality.SNATCHED_PROPER or name in Quality.SNATCHED_BEST
|
||||||
|
|
||||||
|
|
||||||
statusStrings = StatusStrings()
|
statusStrings = StatusStrings()
|
||||||
|
|
||||||
|
|
||||||
class Overview:
|
class Overview:
|
||||||
UNAIRED = UNAIRED # 1
|
UNAIRED = UNAIRED # 1
|
||||||
QUAL = 2
|
QUAL = 2
|
||||||
|
@ -325,9 +328,8 @@ class Overview:
|
||||||
XML_NSMAP = {'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
|
XML_NSMAP = {'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
|
||||||
'xsd': 'http://www.w3.org/2001/XMLSchema'}
|
'xsd': 'http://www.w3.org/2001/XMLSchema'}
|
||||||
|
|
||||||
|
|
||||||
countryList = {'Australia': 'AU',
|
countryList = {'Australia': 'AU',
|
||||||
'Canada': 'CA',
|
'Canada': 'CA',
|
||||||
'USA': 'US'
|
'USA': 'US'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -44,7 +44,6 @@ naming_sep_type_text = (" - ", "space")

-
 def change_HTTPS_CERT(https_cert):

 if https_cert == '':
 sickbeard.HTTPS_CERT = ''
 return True
@@ -60,7 +59,6 @@ def change_HTTPS_CERT(https_cert):

-
 def change_HTTPS_KEY(https_key):

 if https_key == '':
 sickbeard.HTTPS_KEY = ''
 return True
@@ -76,7 +74,6 @@ def change_HTTPS_KEY(https_key):

-
 def change_LOG_DIR(log_dir, web_log):

 log_dir_changed = False
 abs_log_dir = os.path.normpath(os.path.join(sickbeard.DATA_DIR, log_dir))
 web_log_value = checkbox_to_value(web_log)
@@ -110,7 +107,6 @@ def change_LOG_DIR(log_dir, web_log):

-
 def change_NZB_DIR(nzb_dir):

 if nzb_dir == '':
 sickbeard.NZB_DIR = ''
 return True
@@ -126,7 +122,6 @@ def change_NZB_DIR(nzb_dir):

-
 def change_TORRENT_DIR(torrent_dir):

 if torrent_dir == '':
 sickbeard.TORRENT_DIR = ''
 return True
@@ -142,7 +137,6 @@ def change_TORRENT_DIR(torrent_dir):

-
 def change_TV_DOWNLOAD_DIR(tv_download_dir):

 if tv_download_dir == '':
 sickbeard.TV_DOWNLOAD_DIR = ''
 return True
@@ -158,7 +152,6 @@ def change_TV_DOWNLOAD_DIR(tv_download_dir):

-
 def change_SEARCH_FREQUENCY(freq):

 sickbeard.SEARCH_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_SEARCH_FREQUENCY)

 if sickbeard.SEARCH_FREQUENCY < sickbeard.MIN_SEARCH_FREQUENCY:
@@ -169,7 +162,6 @@ def change_SEARCH_FREQUENCY(freq):

-
 def change_VERSION_NOTIFY(version_notify):

 oldSetting = sickbeard.VERSION_NOTIFY

 sickbeard.VERSION_NOTIFY = version_notify
@@ -235,7 +227,6 @@ def clean_host(host, default_port=None):

-
 def clean_hosts(hosts, default_port=None):

 cleaned_hosts = []

 for cur_host in [x.strip() for x in hosts.split(",")]:
@ -363,7 +354,6 @@ def check_setting_str(config, cfg_name, item_name, def_val, log=True):
|
||||||
|
|
||||||
|
|
||||||
class ConfigMigrator():
|
class ConfigMigrator():
|
||||||
|
|
||||||
def __init__(self, config_obj):
|
def __init__(self, config_obj):
|
||||||
"""
|
"""
|
||||||
Initializes a config migrator that can take the config from the version indicated in the config
|
Initializes a config migrator that can take the config from the version indicated in the config
|
||||||
|
@ -388,7 +378,9 @@ class ConfigMigrator():
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if self.config_version > self.expected_config_version:
|
if self.config_version > self.expected_config_version:
|
||||||
logger.log_error_and_exit(u"Your config version (" + str(self.config_version) + ") has been incremented past what this version of Sick Beard supports (" + str(self.expected_config_version) + ").\n" + \
|
logger.log_error_and_exit(u"Your config version (" + str(
|
||||||
|
self.config_version) + ") has been incremented past what this version of Sick Beard supports (" + str(
|
||||||
|
self.expected_config_version) + ").\n" + \
|
||||||
"If you have used other forks or a newer version of Sick Beard, your config file may be unusable due to their modifications.")
|
"If you have used other forks or a newer version of Sick Beard, your config file may be unusable due to their modifications.")
|
||||||
|
|
||||||
sickbeard.CONFIG_VERSION = self.config_version
|
sickbeard.CONFIG_VERSION = self.config_version
|
||||||
|
@ -452,7 +444,8 @@ class ConfigMigrator():
|
||||||
new_season_format = new_season_format.replace('09', '%0S')
|
new_season_format = new_season_format.replace('09', '%0S')
|
||||||
new_season_format = new_season_format.replace('9', '%S')
|
new_season_format = new_season_format.replace('9', '%S')
|
||||||
|
|
||||||
logger.log(u"Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config")
|
logger.log(
|
||||||
|
u"Changed season folder format from " + old_season_format + " to " + new_season_format + ", prepending it to your naming config")
|
||||||
sickbeard.NAMING_PATTERN = new_season_format + os.sep + sickbeard.NAMING_PATTERN
|
sickbeard.NAMING_PATTERN = new_season_format + os.sep + sickbeard.NAMING_PATTERN
|
||||||
|
|
||||||
except (TypeError, ValueError):
|
except (TypeError, ValueError):
|
||||||
|
@ -552,7 +545,8 @@ class ConfigMigrator():
|
||||||
try:
|
try:
|
||||||
name, url, key, enabled = cur_provider_data.split("|")
|
name, url, key, enabled = cur_provider_data.split("|")
|
||||||
except ValueError:
|
except ValueError:
|
||||||
logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format", logger.ERROR)
|
logger.log(u"Skipping Newznab provider string: '" + cur_provider_data + "', incorrect format",
|
||||||
|
logger.ERROR)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if name == 'Sick Beard Index':
|
if name == 'Sick Beard Index':
|
||||||
|
@ -623,7 +617,8 @@ class ConfigMigrator():
|
||||||
logger.log(u"Upgrading " + metadata_name + " metadata, new value: " + metadata)
|
logger.log(u"Upgrading " + metadata_name + " metadata, new value: " + metadata)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
logger.log(u"Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format", logger.ERROR)
|
logger.log(u"Skipping " + metadata_name + " metadata: '" + metadata + "', incorrect format",
|
||||||
|
logger.ERROR)
|
||||||
metadata = '0|0|0|0|0|0|0|0|0|0'
|
metadata = '0|0|0|0|0|0|0|0|0|0'
|
||||||
logger.log(u"Setting " + metadata_name + " metadata, new value: " + metadata)
|
logger.log(u"Setting " + metadata_name + " metadata, new value: " + metadata)
|
||||||
|
|
||||||
|
|
|
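The version guard above is the entry gate of the migrator; once past it, ConfigMigrator presumably steps the config up one version at a time, dispatching to a per-version method until config_version reaches expected_config_version. A sketch of that loop under assumed method names (the real names in this commit may differ):

    # Illustrative only: the dispatch loop implied by the version check above.
    def migrate_config(self):
        while self.config_version < self.expected_config_version:
            next_version = self.config_version + 1
            # each step lives in its own method, e.g. _migrate_v2
            migration = getattr(self, '_migrate_v' + str(next_version), None)
            if migration is None:
                break  # no step defined for this version
            migration()
            self.config_version = next_version
            sickbeard.CONFIG_VERSION = self.config_version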
@@ -19,7 +19,7 @@
 from sickbeard import db
 
 # Add new migrations at the bottom of the list; subclass the previous migration.
-class InitialSchema (db.SchemaUpgrade):
+class InitialSchema(db.SchemaUpgrade):
     def test(self):
         return self.hasTable("lastUpdate")
 

@@ -36,12 +36,15 @@ class InitialSchema (db.SchemaUpgrade):
         else:
             self.connection.action(query[0], query[1:])
 
+
 class AddSceneExceptions(InitialSchema):
     def test(self):
         return self.hasTable("scene_exceptions")
 
     def execute(self):
-        self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)")
+        self.connection.action(
+            "CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, tvdb_id INTEGER KEY, show_name TEXT)")
 
+
 class AddSceneNameCache(AddSceneExceptions):
     def test(self):

@@ -50,6 +53,7 @@ class AddSceneNameCache(AddSceneExceptions):
     def execute(self):
         self.connection.action("CREATE TABLE scene_names (tvdb_id INTEGER, name TEXT)")
 
+
 class AddNetworkTimezones(AddSceneNameCache):
     def test(self):
         return self.hasTable("network_timezones")

@@ -57,19 +61,24 @@ class AddNetworkTimezones(AddSceneNameCache):
     def execute(self):
         self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)")
 
+
 class AddXemNumbering(AddNetworkTimezones):
     def test(self):
         return self.hasTable("xem_numbering")
 
     def execute(self):
-        self.connection.action("CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
+        self.connection.action(
+            "CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
 
+
 class AddXemRefresh(AddXemNumbering):
     def test(self):
         return self.hasTable("xem_refresh")
 
     def execute(self):
-        self.connection.action("CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
+        self.connection.action(
+            "CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
 
+
 class ConvertSceneExceptionsToIndexerID(AddXemRefresh):
     def test(self):

@@ -77,10 +86,13 @@ class ConvertSceneExceptionsToIndexerID(AddXemRefresh):
 
     def execute(self):
         self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions")
-        self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)")
-        self.connection.action("INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions")
+        self.connection.action(
+            "CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT)")
+        self.connection.action(
+            "INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions")
         self.connection.action("DROP TABLE tmp_scene_exceptions")
 
+
 class ConvertSceneNamesToIndexerID(ConvertSceneExceptionsToIndexerID):
     def test(self):
         return self.hasColumn("scene_names", "indexer_id")
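ConvertSceneExceptionsToIndexerID above shows the standard SQLite workaround for renaming a column (tvdb_id to indexer_id): the SQLite shipped at the time has no ALTER TABLE ... RENAME COLUMN, so the table is renamed aside, recreated with the new schema, repopulated, and the temporary copy dropped. The idiom, factored into a hypothetical helper (not part of the commit):

    # The rename-recreate-copy-drop idiom, generalized for clarity.
    def rebuild_table(connection, table, create_sql, old_cols, new_cols):
        connection.action("ALTER TABLE %s RENAME TO tmp_%s" % (table, table))
        connection.action(create_sql)  # new schema, e.g. tvdb_id -> indexer_id
        connection.action("INSERT INTO %s(%s) SELECT %s FROM tmp_%s"
                          % (table, ", ".join(new_cols), ", ".join(old_cols), table))
        connection.action("DROP TABLE tmp_%s" % table)

The same four statements reappear verbatim in the mainDB conversions further down.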
@@ -49,6 +49,7 @@ class SizeAndProvider(InitialSchema):
 
+
 class History(SizeAndProvider):
     """Snatch history that can't be modified by the user"""
 
     def test(self):
         return self.hasTable('history')

@@ -59,6 +60,7 @@ class History(SizeAndProvider):
 
+
 class HistoryStatus(History):
     """Store episode status before snatch to revert to if necessary"""
 
     def test(self):
         return self.hasColumn('history', 'old_status')
|
|
|
@ -29,8 +29,8 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException
|
||||||
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
MIN_DB_VERSION = 9 # oldest db version we support migrating from
|
||||||
MAX_DB_VERSION = 27
|
MAX_DB_VERSION = 27
|
||||||
|
|
||||||
class MainSanityCheck(db.DBSanityCheck):
|
|
||||||
|
|
||||||
|
class MainSanityCheck(db.DBSanityCheck):
|
||||||
def check(self):
|
def check(self):
|
||||||
self.fix_duplicate_shows()
|
self.fix_duplicate_shows()
|
||||||
self.fix_duplicate_episodes()
|
self.fix_duplicate_episodes()
|
||||||
|
@ -38,18 +38,23 @@ class MainSanityCheck(db.DBSanityCheck):
|
||||||
|
|
||||||
def fix_duplicate_shows(self):
|
def fix_duplicate_shows(self):
|
||||||
|
|
||||||
sqlResults = self.connection.select("SELECT show_id, indexer_id, COUNT(indexer_id) as count FROM tv_shows GROUP BY indexer_id HAVING count > 1")
|
sqlResults = self.connection.select(
|
||||||
|
"SELECT show_id, indexer_id, COUNT(indexer_id) as count FROM tv_shows GROUP BY indexer_id HAVING count > 1")
|
||||||
|
|
||||||
for cur_duplicate in sqlResults:
|
for cur_duplicate in sqlResults:
|
||||||
|
|
||||||
logger.log(u"Duplicate show detected! indexer_id: " + str(cur_duplicate["indexer_id"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG)
|
logger.log(u"Duplicate show detected! indexer_id: " + str(cur_duplicate["indexer_id"]) + u" count: " + str(
|
||||||
|
cur_duplicate["count"]), logger.DEBUG)
|
||||||
|
|
||||||
cur_dupe_results = self.connection.select("SELECT show_id, indexer_id FROM tv_shows WHERE indexer_id = ? LIMIT ?",
|
cur_dupe_results = self.connection.select(
|
||||||
[cur_duplicate["indexer_id"], int(cur_duplicate["count"])-1]
|
"SELECT show_id, indexer_id FROM tv_shows WHERE indexer_id = ? LIMIT ?",
|
||||||
|
[cur_duplicate["indexer_id"], int(cur_duplicate["count"]) - 1]
|
||||||
)
|
)
|
||||||
|
|
||||||
for cur_dupe_id in cur_dupe_results:
|
for cur_dupe_id in cur_dupe_results:
|
||||||
logger.log(u"Deleting duplicate show with indexer_id: " + str(cur_dupe_id["indexer_id"]) + u" show_id: " + str(cur_dupe_id["show_id"]))
|
logger.log(
|
||||||
|
u"Deleting duplicate show with indexer_id: " + str(cur_dupe_id["indexer_id"]) + u" show_id: " + str(
|
||||||
|
cur_dupe_id["show_id"]))
|
||||||
self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
|
self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
@ -57,14 +62,19 @@ class MainSanityCheck(db.DBSanityCheck):
|
||||||
|
|
||||||
def fix_duplicate_episodes(self):
|
def fix_duplicate_episodes(self):
|
||||||
|
|
||||||
sqlResults = self.connection.select("SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")
|
sqlResults = self.connection.select(
|
||||||
|
"SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")
|
||||||
|
|
||||||
for cur_duplicate in sqlResults:
|
for cur_duplicate in sqlResults:
|
||||||
|
|
||||||
logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: "+str(cur_duplicate["season"]) + u" episode: "+str(cur_duplicate["episode"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG)
|
logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: " + str(
|
||||||
|
cur_duplicate["season"]) + u" episode: " + str(cur_duplicate["episode"]) + u" count: " + str(
|
||||||
|
cur_duplicate["count"]), logger.DEBUG)
|
||||||
|
|
||||||
cur_dupe_results = self.connection.select("SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
|
cur_dupe_results = self.connection.select(
|
||||||
[cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], int(cur_duplicate["count"])-1]
|
"SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
|
||||||
|
[cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"],
|
||||||
|
int(cur_duplicate["count"]) - 1]
|
||||||
)
|
)
|
||||||
|
|
||||||
for cur_dupe_id in cur_dupe_results:
|
for cur_dupe_id in cur_dupe_results:
|
||||||
|
@ -76,16 +86,19 @@ class MainSanityCheck(db.DBSanityCheck):
|
||||||
|
|
||||||
def fix_orphan_episodes(self):
|
def fix_orphan_episodes(self):
|
||||||
|
|
||||||
sqlResults = self.connection.select("SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL")
|
sqlResults = self.connection.select(
|
||||||
|
"SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL")
|
||||||
|
|
||||||
for cur_orphan in sqlResults:
|
for cur_orphan in sqlResults:
|
||||||
logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(cur_orphan["showid"]), logger.DEBUG)
|
logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(
|
||||||
logger.log(u"Deleting orphan episode with episode_id: "+str(cur_orphan["episode_id"]))
|
cur_orphan["showid"]), logger.DEBUG)
|
||||||
|
logger.log(u"Deleting orphan episode with episode_id: " + str(cur_orphan["episode_id"]))
|
||||||
self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
|
self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
|
||||||
|
|
||||||
else:
|
else:
|
||||||
logger.log(u"No orphan episodes, check passed")
|
logger.log(u"No orphan episodes, check passed")
|
||||||
|
|
||||||
|
|
||||||
def backupDatabase(version):
|
def backupDatabase(version):
|
||||||
logger.log(u"Backing up database before upgrade")
|
logger.log(u"Backing up database before upgrade")
|
||||||
if not helpers.backupVersionedFile(db.dbFilename(), version):
|
if not helpers.backupVersionedFile(db.dbFilename(), version):
|
||||||
|
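Both duplicate fixers above share one query shape: GROUP BY/HAVING flags keys that occur more than once, and a second SELECT limited to count - 1 picks the surplus rows to delete, keeping exactly one. Reduced to a generic illustration over plain sqlite3 (not code from the commit):

    # Illustrative dedup pattern; the real methods key on show/episode columns.
    import sqlite3

    def dedupe(conn, table, key):
        dupes = conn.execute(
            "SELECT %s, COUNT(%s) as count FROM %s GROUP BY %s HAVING count > 1"
            % (key, key, table, key)).fetchall()
        for key_value, count in dupes:
            # keep one row, delete the other count - 1
            extras = conn.execute(
                "SELECT rowid FROM %s WHERE %s = ? LIMIT ?" % (table, key),
                [key_value, count - 1]).fetchall()
            for (rowid,) in extras:
                conn.execute("DELETE FROM %s WHERE rowid = ?" % table, [rowid])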
@@ -93,12 +106,13 @@ def backupDatabase(version):
     else:
         logger.log(u"Proceeding with upgrade")
 
+
 # ======================
 # = Main DB Migrations =
 # ======================
 # Add new migrations at the bottom of the list; subclass the previous migration.
 
-class InitialSchema (db.SchemaUpgrade):
+class InitialSchema(db.SchemaUpgrade):
     def test(self):
         return self.hasTable("db_version")
 
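The comment above is the entire migration contract: each upgrade subclasses the previous one, test() reports whether it has already been applied, and execute() makes the change and bumps the DB version. A hypothetical next link on this chain (the class name and column are invented for illustration; AddSceneNumbering, at version 27, is the real tail of the chain in this commit):

    # Hypothetical migration, shown only to make the chain pattern concrete.
    class AddExampleColumn(AddSceneNumbering):
        def test(self):
            # already applied? version 28 would be the next step after 27
            return self.checkDBVersion() >= 28

        def execute(self):
            self.addColumn("tv_shows", "example_flag", "NUMERIC", "0")
            self.incDBVersion()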
@@ -123,20 +137,21 @@ class InitialSchema (db.SchemaUpgrade):
         cur_db_version = self.checkDBVersion()
 
         if cur_db_version < MIN_DB_VERSION:
-            logger.log_error_and_exit(u"Your database version (" + str(cur_db_version) + ") is too old to migrate from what this version of Sick Beard supports (" + \
+            logger.log_error_and_exit(u"Your database version (" + str(
+                cur_db_version) + ") is too old to migrate from what this version of Sick Beard supports (" + \
                                       str(MIN_DB_VERSION) + ").\n" + \
                                       "Upgrade using a previous version (tag) build 496 to build 501 of Sick Beard first or remove database file to begin fresh."
             )
 
         if cur_db_version > MAX_DB_VERSION:
-            logger.log_error_and_exit(u"Your database version (" + str(cur_db_version) + ") has been incremented past what this version of Sick Beard supports (" + \
+            logger.log_error_and_exit(u"Your database version (" + str(
+                cur_db_version) + ") has been incremented past what this version of Sick Beard supports (" + \
                                       str(MAX_DB_VERSION) + ").\n" + \
                                       "If you have used other forks of Sick Beard, your database may be unusable due to their modifications."
             )
 
 
 class AddSizeAndSceneNameFields(InitialSchema):
-
     def test(self):
         return self.checkDBVersion() >= 10
 

@@ -160,7 +175,8 @@ class AddSizeAndSceneNameFields(InitialSchema):
             # if there is no size yet then populate it for us
             if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]):
                 cur_size = ek.ek(os.path.getsize, cur_ep["location"])
-                self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?", [cur_size, int(cur_ep["episode_id"])])
+                self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?",
+                                       [cur_size, int(cur_ep["episode_id"])])
 
         # check each snatch to see if we can use it to get a release name from
         history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")

@@ -168,10 +184,12 @@ class AddSizeAndSceneNameFields(InitialSchema):
         logger.log(u"Adding release name to all episodes still in history")
         for cur_result in history_results:
             # find the associated download, if there isn't one then ignore it
-            download_results = self.connection.select("SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
+            download_results = self.connection.select(
+                "SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
                 [cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]])
             if not download_results:
-                logger.log(u"Found a snatch in the history for "+cur_result["resource"]+" but couldn't find the associated download, skipping it", logger.DEBUG)
+                logger.log(u"Found a snatch in the history for " + cur_result[
+                    "resource"] + " but couldn't find the associated download, skipping it", logger.DEBUG)
                 continue
 
             nzb_name = cur_result["resource"]

@@ -182,10 +200,13 @@ class AddSizeAndSceneNameFields(InitialSchema):
                 file_name = file_name.rpartition('.')[0]
 
             # find the associated episode on disk
-            ep_results = self.connection.select("SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
+            ep_results = self.connection.select(
+                "SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
                 [cur_result["showid"], cur_result["season"], cur_result["episode"]])
             if not ep_results:
-                logger.log(u"The episode "+nzb_name+" was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG)
+                logger.log(
+                    u"The episode " + nzb_name + " was found in history but doesn't exist on disk anymore, skipping",
+                    logger.DEBUG)
                 continue
 
             # get the status/quality of the existing ep and make sure it's what we expect

@@ -198,7 +219,7 @@ class AddSizeAndSceneNameFields(InitialSchema):
 
             # make sure this is actually a real release name and not a season pack or something
             for cur_name in (nzb_name, file_name):
-                logger.log(u"Checking if "+cur_name+" is actually a good release name", logger.DEBUG)
+                logger.log(u"Checking if " + cur_name + " is actually a good release name", logger.DEBUG)
                 try:
                     np = NameParser(False)
                     parse_result = np.parse(cur_name)

@@ -207,7 +228,8 @@ class AddSizeAndSceneNameFields(InitialSchema):
 
                 if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
                     # if all is well by this point we'll just put the release name into the database
-                    self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [cur_name, ep_results[0]["episode_id"]])
+                    self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
+                                           [cur_name, ep_results[0]["episode_id"]])
                     break
 
         # check each snatch to see if we can use it to get a release name from

@@ -232,21 +254,24 @@ class AddSizeAndSceneNameFields(InitialSchema):
                 if not parse_result.release_group:
                     continue
 
-                logger.log(u"Name "+ep_file_name+" gave release group of "+parse_result.release_group+", seems valid", logger.DEBUG)
-                self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [ep_file_name, cur_result["episode_id"]])
+                logger.log(
+                    u"Name " + ep_file_name + " gave release group of " + parse_result.release_group + ", seems valid",
+                    logger.DEBUG)
+                self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?",
+                                       [ep_file_name, cur_result["episode_id"]])
 
         self.incDBVersion()
 
-class RenameSeasonFolders(AddSizeAndSceneNameFields):
 
+class RenameSeasonFolders(AddSizeAndSceneNameFields):
     def test(self):
         return self.checkDBVersion() >= 11
 
     def execute(self):
 
         # rename the column
         self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
-        self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)")
+        self.connection.action(
+            "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)")
         sql = "INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows"
         self.connection.action(sql)
 

@@ -258,27 +283,29 @@ class RenameSeasonFolders(AddSizeAndSceneNameFields):
 
         self.incDBVersion()
 
+
 class AddSubtitlesSupport(RenameSeasonFolders):
     def test(self):
         return self.checkDBVersion() >= 12
 
     def execute(self):
-
         self.addColumn("tv_shows", "subtitles")
         self.addColumn("tv_episodes", "subtitles", "TEXT", "")
         self.addColumn("tv_episodes", "subtitles_searchcount")
         self.addColumn("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min))
         self.incDBVersion()
 
+
 class AddIMDbInfo(RenameSeasonFolders):
     def test(self):
         return self.checkDBVersion() >= 13
 
     def execute(self):
-        self.connection.action("CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
+        self.connection.action(
+            "CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
         self.incDBVersion()
 
+
 class Add1080pAndRawHDQualities(AddIMDbInfo):
     """Add support for 1080p related qualities along with RawHD
 

@@ -311,17 +338,17 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
 
         result = old_quality
         # move fullhdbluray from 1<<5 to 1<<8 if set
-        if(result & (1<<5)):
-            result = result & ~(1<<5)
-            result = result | (1<<8)
+        if (result & (1 << 5)):
+            result = result & ~(1 << 5)
+            result = result | (1 << 8)
         # move hdbluray from 1<<4 to 1<<7 if set
-        if(result & (1<<4)):
-            result = result & ~(1<<4)
-            result = result | (1<<7)
+        if (result & (1 << 4)):
+            result = result & ~(1 << 4)
+            result = result | (1 << 7)
         # move hdwebdl from 1<<3 to 1<<5 if set
-        if(result & (1<<3)):
-            result = result & ~(1<<3)
-            result = result | (1<<5)
+        if (result & (1 << 3)):
+            result = result & ~(1 << 3)
+            result = result | (1 << 5)
 
         return result
 
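_update_quality above only relocates set bits to their new place values -- clear the old bit, set the new one. The same arithmetic as a tiny standalone example (not code from the commit):

    # Worked example of the bit moves above, e.g. hdwebdl: 1<<3 -> 1<<5.
    def move_bit(mask, src, dst):
        if mask & (1 << src):
            mask &= ~(1 << src)
            mask |= (1 << dst)
        return mask

    old = (1 << 3) | (1 << 2)   # hdwebdl + a bit that stays put
    new = move_bit(old, 3, 5)   # hdwebdl now lives at bit 5
    assert new == (1 << 5) | (1 << 2)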
@@ -350,12 +377,19 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
         sickbeard.save_config()
 
         # upgrade previous HD to HD720p -- shift previous qualities to new placevalues
-        old_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
-        new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], [])
+        old_hd = common.Quality.combineQualities(
+            [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
+        new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY],
+                                                 [])
 
         # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
-        old_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
-        new_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], [])
+        old_any = common.Quality.combineQualities(
+            [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
+             common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
+        new_any = common.Quality.combineQualities(
+            [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
+             common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY,
+             common.Quality.UNKNOWN], [])
 
         # update qualities (including templates)
         logger.log(u"[1/4] Updating pre-defined templates and the quality for each show...", logger.MESSAGE)

@@ -376,7 +410,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
         ql = []
         episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800")
         for cur_episode in episodes:
-            ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
+            ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
+                       [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
         self.connection.mass_action(ql)
 
         # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together

@@ -386,7 +421,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
         ql = []
         historyAction = self.connection.select("SELECT * FROM history WHERE action < 3276800 AND action >= 800")
         for cur_entry in historyAction:
-            ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?", [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
+            ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?",
+                       [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
         self.connection.mass_action(ql)
 
         # update previous history so it shows the correct quality

@@ -394,7 +430,8 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
         ql = []
         historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
         for cur_entry in historyQuality:
-            ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?", [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
+            ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?",
+                       [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
         self.connection.mass_action(ql)
 
         self.incDBVersion()

@@ -403,6 +440,7 @@ class Add1080pAndRawHDQualities(AddIMDbInfo):
         logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
         self.connection.action("VACUUM")
 
+
 class AddProperNamingSupport(Add1080pAndRawHDQualities):
     def test(self):
         return self.checkDBVersion() >= 15

@@ -411,6 +449,7 @@ class AddProperNamingSupport(Add1080pAndRawHDQualities):
         self.addColumn("tv_episodes", "is_proper")
         self.incDBVersion()
 
+
 class AddEmailSubscriptionTable(AddProperNamingSupport):
     def test(self):
         return self.hasColumn("tv_shows", "notify_list")

@@ -419,6 +458,7 @@ class AddEmailSubscriptionTable(AddProperNamingSupport):
         self.addColumn('tv_shows', 'notify_list', 'TEXT', None)
         self.incDBVersion()
 
+
 class AddShowidTvdbidIndex(AddEmailSubscriptionTable):
     """ Adding index on tvdb_id (tv_shows) and showid (tv_episodes) to speed up searches/queries """
 

@@ -439,6 +479,7 @@ class AddShowidTvdbidIndex(AddEmailSubscriptionTable):
 
         self.incDBVersion()
 
+
 class AddLastUpdateTVDB(AddShowidTvdbidIndex):
     """ Adding column last_update_tvdb to tv_shows for controlling nightly updates """
 

@@ -454,6 +495,7 @@ class AddLastUpdateTVDB(AddShowidTvdbidIndex):
 
         self.incDBVersion()
 
+
 class AddLastProperSearch(AddLastUpdateTVDB):
     def test(self):
         return self.checkDBVersion() >= 19

@@ -467,6 +509,7 @@ class AddLastProperSearch(AddLastUpdateTVDB):
 
         self.incDBVersion()
 
+
 class AddDvdOrderOption(AddLastProperSearch):
     def test(self):
         return self.checkDBVersion() >= 20

@@ -478,6 +521,7 @@ class AddDvdOrderOption(AddLastProperSearch):
 
         self.incDBVersion()
 
+
 class AddIndicesToTvEpisodes(AddDvdOrderOption):
     """ Adding indices to tv episodes """
 

@@ -498,6 +542,7 @@ class AddIndicesToTvEpisodes(AddDvdOrderOption):
 
         self.incDBVersion()
 
+
 class ConvertTVShowsToIndexerScheme(AddIndicesToTvEpisodes):
     def test(self):
         return self.checkDBVersion() >= 22

@@ -507,14 +552,17 @@ class ConvertTVShowsToIndexerScheme(AddIndicesToTvEpisodes):
 
         logger.log(u"Converting TV Shows table to Indexer Scheme...")
         self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
-        self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer TEXT, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
-        self.connection.action("UPDATE tv_shows SET indexer = 'Tvdb'")
+        self.connection.action(
+            "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMBERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC)")
+        self.connection.action("UPDATE tv_shows SET indexer = 1")
         self.connection.action("UPDATE tv_shows SET classification = 'Scripted'")
-        self.connection.action("INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
+        self.connection.action(
+            "INSERT INTO tv_shows(show_id, indexer_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_indexer, dvdorder) SELECT show_id, tvdb_id, show_name, location, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles, notify_list, imdb_id, last_update_tvdb, dvdorder FROM tmp_tv_shows")
         self.connection.action("DROP TABLE tmp_tv_shows")
 
         self.incDBVersion()
 
+
 class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 23

@@ -524,13 +572,16 @@ class ConvertTVEpisodesToIndexerScheme(ConvertTVShowsToIndexerScheme):
 
         logger.log(u"Converting TV Episodes table to Indexer Scheme...")
         self.connection.action("ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes")
-        self.connection.action("CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer TEXT, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
-        self.connection.action("UPDATE tv_episodes SET indexer = 'Tvdb'")
-        self.connection.action("INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
+        self.connection.action(
+            "CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid NUMERIC, indexer NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC)")
+        self.connection.action("UPDATE tv_episodes SET indexer = 1")
+        self.connection.action(
+            "INSERT INTO tv_episodes(episode_id, showid, indexerid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper) SELECT episode_id, showid, tvdbid, name, season, episode, description, airdate, hasnfo, hastbn, status, location, file_size, release_name, subtitles, subtitles_searchcount, subtitles_lastsearch, is_proper FROM tmp_tv_episodes")
         self.connection.action("DROP TABLE tmp_tv_episodes")
 
         self.incDBVersion()
 
+
 class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 24

@@ -540,12 +591,15 @@ class ConvertIMDBInfoToIndexerScheme(ConvertTVEpisodesToIndexerScheme):
 
         logger.log(u"Converting IMDB Info table to Indexer Scheme...")
         self.connection.action("ALTER TABLE imdb_info RENAME TO tmp_imdb_info")
-        self.connection.action("CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
-        self.connection.action("INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
+        self.connection.action(
+            "CREATE TABLE imdb_info (indexer_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
+        self.connection.action(
+            "INSERT INTO imdb_info(indexer_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update) SELECT tvdb_id, imdb_id, title, year, akas, runtimes, genres, countries, country_codes, certificates, rating, votes, last_update FROM tmp_imdb_info")
         self.connection.action("DROP TABLE tmp_imdb_info")
 
         self.incDBVersion()
 
+
 class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 25

@@ -555,12 +609,15 @@ class ConvertInfoToIndexerScheme(ConvertIMDBInfoToIndexerScheme):
 
         logger.log(u"Converting Info table to Indexer Scheme...")
         self.connection.action("ALTER TABLE info RENAME TO tmp_info")
-        self.connection.action("CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
-        self.connection.action("INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
+        self.connection.action(
+            "CREATE TABLE info (last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC)")
+        self.connection.action(
+            "INSERT INTO info(last_backlog, last_indexer, last_proper_search) SELECT last_backlog, last_tvdb, last_proper_search FROM tmp_info")
         self.connection.action("DROP TABLE tmp_info")
 
         self.incDBVersion()
 
+
 class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
     def test(self):
         return self.checkDBVersion() >= 26

@@ -572,6 +629,7 @@ class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
 
         self.incDBVersion()
 
+
 class AddSceneNumbering(AddArchiveFirstMatchOption):
     def test(self):
         return self.checkDBVersion() >= 27

@@ -582,6 +640,7 @@ class AddSceneNumbering(AddArchiveFirstMatchOption):
         if self.hasTable("scene_numbering"):
             self.connection.action("DROP TABLE scene_numbering")
 
-        self.connection.action("CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
+        self.connection.action(
+            "CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
 
         self.incDBVersion()
@@ -32,6 +32,7 @@ from sickbeard.exceptions import ex
 
 db_lock = threading.Lock()
 
+
 def dbFilename(filename="sickbeard.db", suffix=None):
     """
     @param filename: The sqlite database filename to use. If not specified,

@@ -44,6 +45,7 @@ def dbFilename(filename="sickbeard.db", suffix=None):
         filename = "%s.%s" % (filename, suffix)
     return ek.ek(os.path.join, sickbeard.DATA_DIR, filename)
 
+
 class DBConnection:
     def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):
 

@@ -198,9 +200,10 @@ class DBConnection:
 
         changesBefore = self.connection.total_changes
 
-        genParams = lambda myDict : [x + " = ?" for x in myDict.keys()]
+        genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]
 
-        query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(genParams(keyDict))
+        query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(
+            genParams(keyDict))
 
         self.action(query, valueDict.values() + keyDict.values())
 
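The changesBefore snapshot above is half of an upsert: run the UPDATE first, and if sqlite3's total_changes counter did not move, the row was missing, so INSERT it. A paraphrase of the surrounding method, assuming this is its full shape (not verbatim from the commit):

    # Sketch of the UPDATE-then-INSERT upsert the snapshot above supports.
    def upsert(self, tableName, valueDict, keyDict):
        changesBefore = self.connection.total_changes
        genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]
        query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) \
                + " WHERE " + " AND ".join(genParams(keyDict))
        self.action(query, valueDict.values() + keyDict.values())
        if self.connection.total_changes == changesBefore:
            # UPDATE touched nothing, so the row does not exist yet: INSERT it
            columns = valueDict.keys() + keyDict.keys()
            query = "INSERT INTO " + tableName + " (" + ", ".join(columns) + ")" \
                    + " VALUES (" + ", ".join(["?"] * len(columns)) + ")"
            self.action(query, valueDict.values() + keyDict.values())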
@@ -214,7 +217,7 @@ class DBConnection:
         cursor = self.connection.execute("PRAGMA table_info(%s)" % tableName)
         columns = {}
         for column in cursor:
-            columns[column['name']] = { 'type': column['type'] }
+            columns[column['name']] = {'type': column['type']}
         return columns
 
     # http://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query

@@ -224,9 +227,11 @@ class DBConnection:
             d[col[0]] = row[idx]
         return d
 
+
 def sanityCheckDatabase(connection, sanity_check):
     sanity_check(connection).check()
 
+
 class DBSanityCheck(object):
     def __init__(self, connection):
         self.connection = connection

@@ -234,6 +239,7 @@ class DBSanityCheck(object):
     def check(self):
         pass
 
+
 # ===============
 # = Upgrade API =
 # ===============

@@ -242,9 +248,11 @@ def upgradeDatabase(connection, schema):
     logger.log(u"Checking database structure...", logger.MESSAGE)
     _processUpgrade(connection, schema)
 
+
 def prettyName(class_name):
     return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])
 
+
 def _processUpgrade(connection, upgradeClass):
     instance = upgradeClass(connection)
     logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
@ -262,8 +270,9 @@ def _processUpgrade(connection, upgradeClass):
|
||||||
for upgradeSubClass in upgradeClass.__subclasses__():
|
for upgradeSubClass in upgradeClass.__subclasses__():
|
||||||
_processUpgrade(connection, upgradeSubClass)
|
_processUpgrade(connection, upgradeSubClass)
|
||||||
|
|
||||||
|
|
||||||
# Base migration class. All future DB changes should be subclassed from this class
|
# Base migration class. All future DB changes should be subclassed from this class
|
||||||
class SchemaUpgrade (object):
|
class SchemaUpgrade(object):
|
||||||
def __init__(self, connection):
|
def __init__(self, connection):
|
||||||
self.connection = connection
|
self.connection = connection
|
||||||
|
|
||||||
|
|
|
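For reference, a minimal sketch (not part of this commit) of the query the reformatted genParams lambda builds; the table and column names are illustrative, and Python 2 dict ordering makes the exact column order arbitrary:

    genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]

    valueDict = {"status": 3, "location": "/tv/Show Name"}   # hypothetical columns
    keyDict = {"indexer_id": 12345}

    query = "UPDATE tv_shows SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(
        genParams(keyDict))
    print(query)  # e.g. UPDATE tv_shows SET status = ?, location = ? WHERE indexer_id = ?
    # the bound parameters follow the same order (Python 2 dict .values() returns lists)
    print(valueDict.values() + keyDict.values())  # e.g. [3, '/tv/Show Name', 12345]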
@@ -30,29 +30,36 @@ def fixStupidEncodings(x, silent=False):
         try:
             return x.decode(sickbeard.SYS_ENCODING)
         except UnicodeDecodeError:
-            logger.log(u"Unable to decode value: "+repr(x), logger.ERROR)
+            logger.log(u"Unable to decode value: " + repr(x), logger.ERROR)
             return None
     elif type(x) == unicode:
         return x
     else:
-        logger.log(u"Unknown value passed in, ignoring it: "+str(type(x))+" ("+repr(x)+":"+repr(type(x))+")", logger.DEBUG if silent else logger.ERROR)
+        logger.log(
+            u"Unknown value passed in, ignoring it: " + str(type(x)) + " (" + repr(x) + ":" + repr(type(x)) + ")",
+            logger.DEBUG if silent else logger.ERROR)
         return None

     return None

+
 def fixListEncodings(x):
     if type(x) != list and type(x) != tuple:
         return x
     else:
         return filter(lambda x: x != None, map(fixStupidEncodings, x))

+
 def callPeopleStupid(x):
     try:
         return x.encode(sickbeard.SYS_ENCODING)
     except UnicodeEncodeError:
-        logger.log(u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: "+repr(x)+", "+sickbeard.SYS_ENCODING, logger.ERROR)
+        logger.log(
+            u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: " + repr(
+                x) + ", " + sickbeard.SYS_ENCODING, logger.ERROR)
         return x.encode(sickbeard.SYS_ENCODING, 'ignore')

+
 def ek(func, *args, **kwargs):
     result = None
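A minimal standalone sketch of the decode path in fixStupidEncodings above, assuming SYS_ENCODING is UTF-8 (Python 2 semantics, as in the codebase; the real helper also logs failures):

    SYS_ENCODING = 'UTF-8'  # stands in for sickbeard.SYS_ENCODING

    def fix(x):
        if type(x) == str:
            try:
                return x.decode(SYS_ENCODING)
            except UnicodeDecodeError:
                return None
        elif type(x) == unicode:
            return x
        return None

    print(repr(fix('caf\xc3\xa9')))  # u'caf\xe9' - byte string decoded to unicode
    print(repr(fix(u'caf\xe9')))     # u'caf\xe9' - unicode passes through unchanged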
@@ -104,10 +104,6 @@ class NewzbinAPIThrottled(SickBeardException):
     "Newzbin has throttled us, deal with it"


-class TVRageException(SickBeardException):
-    "TVRage API did something bad"
-
-
 class ShowDirNotFoundException(SickBeardException):
     "The show dir doesn't exist"
@@ -76,7 +76,9 @@ class FailedProcessor(object):

         self._show_obj = helpers.findCertainShow(sickbeard.showList, show_id)
         if self._show_obj is None:
-            self._log(u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)", logger.WARNING)
+            self._log(
+                u"Could not create show object. Either the show hasn't been added to SickBeard, or it's still loading (if SB was restarted recently)",
+                logger.WARNING)
             raise exceptions.FailedProcessingFailed()

         for episode in parsed.episode_numbers:

@@ -105,7 +107,7 @@ class FailedProcessor(object):
         for show_name in show_names:
             found_info = helpers.searchDBForShow(show_name)
             if found_info is not None:
-                return(found_info[1])
+                return (found_info[1])

         return None
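As the searchDBForShow hunks further down make explicit, that helper returns an (indexer, indexer_id, show_name) tuple, so the processor above keeps only element [1]; the values in this sketch are purely illustrative:

    found_info = (1, 73739, 'Show Name')  # hypothetical (indexer, indexer_id, show_name)
    show_id = found_info[1]               # 73739 - what the failed processor works with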
@@ -20,7 +20,6 @@ import re
 import urllib
 import datetime

-
 from sickbeard import db
 from sickbeard import logger
 from sickbeard import exceptions

@@ -38,7 +37,7 @@ def prepareFailedName(release):
     """Standardizes release name for failed DB"""

     fixed = urllib.unquote(release)
-    if(fixed.endswith(".nzb")):
+    if (fixed.endswith(".nzb")):
         fixed = fixed.rpartition(".")[0]

     fixed = re.sub("[\.\-\+\ ]", "_", fixed)

@@ -56,8 +55,10 @@ def logFailed(release):
     sql_results = myDB.select("SELECT * FROM history WHERE release=?", [release])

     if len(sql_results) == 0:
-        log_str += _log_helper(u"Release not found in snatch history. Recording it as bad with no size and no proivder.", logger.WARNING)
-        log_str += _log_helper(u"Future releases of the same name from providers that don't return size will be skipped.", logger.WARNING)
+        log_str += _log_helper(
+            u"Release not found in snatch history. Recording it as bad with no size and no proivder.", logger.WARNING)
+        log_str += _log_helper(
+            u"Future releases of the same name from providers that don't return size will be skipped.", logger.WARNING)
     elif len(sql_results) > 1:
         log_str += _log_helper(u"Multiple logged snatches found for release", logger.WARNING)
         sizes = len(set(x["size"] for x in sql_results))

@@ -66,7 +67,9 @@ def logFailed(release):
             log_str += _log_helper(u"However, they're all the same size. Continuing with found size.", logger.WARNING)
             size = sql_results[0]["size"]
         else:
-            log_str += _log_helper(u"They also vary in size. Deleting the logged snatches and recording this release with no size/provider", logger.WARNING)
+            log_str += _log_helper(
+                u"They also vary in size. Deleting the logged snatches and recording this release with no size/provider",
+                logger.WARNING)
             for result in sql_results:
                 deleteLoggedSnatch(result["release"], result["size"], result["provider"])

@@ -128,13 +131,15 @@ def revertEpisode(show_obj, season, episode=None):
                 log_str += _log_helper(u"Found in history")
                 ep_obj.status = history_eps[episode]['old_status']
             else:
-                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED", logger.WARNING)
+                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
+                                       logger.WARNING)
                 ep_obj.status = WANTED

             ep_obj.saveToDB()

         except exceptions.EpisodeNotFoundException, e:
-            log_str += _log_helper(u"Unable to create episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
+            log_str += _log_helper(u"Unable to create episode, please set its status manually: " + exceptions.ex(e),
+                                   logger.WARNING)
     else:
         # Whole season
         log_str += _log_helper(u"Setting season to wanted: " + str(season))

@@ -145,13 +150,15 @@ def revertEpisode(show_obj, season, episode=None):
                 log_str += _log_helper(u"Found in history")
                 ep_obj.status = history_eps[ep_obj]['old_status']
             else:
-                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED", logger.WARNING)
+                log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
+                                       logger.WARNING)
                 ep_obj.status = WANTED

             ep_obj.saveToDB()

     return log_str

+
 def markFailed(show_obj, season, episode=None):
     log_str = u""

@@ -165,7 +172,8 @@ def markFailed(show_obj, season, episode=None):
             ep_obj.saveToDB()

         except exceptions.EpisodeNotFoundException, e:
-            log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e), logger.WARNING)
+            log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e),
+                                   logger.WARNING)
     else:
         # Whole season
         for ep_obj in show_obj.getAllEpisodes(season):

@@ -176,6 +184,7 @@ def markFailed(show_obj, season, episode=None):

     return log_str

+
 def logSnatch(searchResult):
     myDB = db.DBConnection("failed.db")

@@ -196,7 +205,8 @@ def logSnatch(searchResult):
         myDB.action(
             "INSERT INTO history (date, size, release, provider, showid, season, episode, old_status)"
             "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
-            [logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode, old_status])
+            [logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode,
+             old_status])


 def deleteLoggedSnatch(release, size, provider):

@@ -210,7 +220,8 @@ def deleteLoggedSnatch(release, size, provider):

 def trimHistory():
     myDB = db.DBConnection("failed.db")
-    myDB.action("DELETE FROM history WHERE date < " + str((datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))
+    myDB.action("DELETE FROM history WHERE date < " + str(
+        (datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))


 def findRelease(show, season, episode):

@@ -227,10 +238,13 @@ def findRelease(show, season, episode):
     myDB = db.DBConnection("failed.db")

     # Clear old snatches for this release if any exist
-    myDB.action("DELETE FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + ")")
+    myDB.action("DELETE FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(
+        season) + " AND episode=" + str(episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
+        show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + ")")

     # Search for release in snatch history
-    results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",[show.indexerid, season, episode])
+    results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",
+                          [show.indexerid, season, episode])

     for result in results:
         release = str(result["release"])

@@ -238,7 +252,7 @@ def findRelease(show, season, episode):
         date = result["date"]

         # Clear any incomplete snatch records for this release if any exist
-        myDB.action("DELETE FROM history WHERE release=? AND date!=?",[release, date])
+        myDB.action("DELETE FROM history WHERE release=? AND date!=?", [release, date])

         # Found a previously failed release
         logger.log(u"Failed release found for season (%s): (%s)" % (season, result["release"]), logger.DEBUG)
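A standalone sketch of what prepareFailedName (touched above) does to a release name before it is stored in the failed DB:

    import re
    import urllib

    def prepare_failed_name(release):
        fixed = urllib.unquote(release)
        if fixed.endswith(".nzb"):
            fixed = fixed.rpartition(".")[0]
        return re.sub("[\.\-\+\ ]", "_", fixed)

    print(prepare_failed_name("Some.Show.S01E02+720p-GRP.nzb"))
    # Some_Show_S01E02_720p_GRP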
@@ -21,13 +21,14 @@ import threading

 from sickbeard import logger


 class QueuePriorities:
     LOW = 10
     NORMAL = 20
     HIGH = 30

-class GenericQueue(object):
-
+
+class GenericQueue(object):
     def __init__(self):

         self.currentItem = None

@@ -69,7 +70,7 @@ class GenericQueue(object):
         if len(self.queue) > 0:

             # sort by priority
-            def sorter(x,y):
+            def sorter(x, y):
                 """
                 Sorts by priority descending then time ascending
                 """

@@ -81,7 +82,7 @@ class GenericQueue(object):
                 elif y.added > x.added:
                     return -1
                 else:
-                    return y.priority-x.priority
+                    return y.priority - x.priority

             self.queue.sort(cmp=sorter)

@@ -101,8 +102,9 @@ class GenericQueue(object):
             # take it out of the queue
             del self.queue[0]

+
 class QueueItem:
-    def __init__(self, name, action_id = 0):
+    def __init__(self, name, action_id=0):
         self.name = name

         self.inProgress = False

@@ -119,7 +121,7 @@ class QueueItem:
         if self.thread_name:
             return self.thread_name
         else:
-            return self.name.replace(" ","-").upper()
+            return self.name.replace(" ", "-").upper()

     def execute(self):
         """Implementing classes should call this"""
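A minimal sketch of the ordering the sorter's docstring promises (priority descending, then added time ascending); it uses a sorted() key rather than the comparator above, whose first branch falls outside this hunk:

    items = [('low', 10, 2), ('high', 30, 3), ('high-older', 30, 1)]  # (name, priority, added)
    ordered = sorted(items, key=lambda i: (-i[1], i[2]))
    print([name for name, _, _ in ordered])  # ['high-older', 'high', 'low']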
@@ -69,7 +69,8 @@ class GitHub(object):

         Returns a deserialized json object containing the commit info. See http://developer.github.com/v3/repos/commits/
         """
-        access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'], params={'per_page': 100, 'sha': self.branch})
+        access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'commits'],
+                                      params={'per_page': 100, 'sha': self.branch})
         return access_API

     def compare(self, base, head, per_page=1):

@@ -84,5 +85,7 @@ class GitHub(object):

         Returns a deserialized json object containing the compare info. See http://developer.github.com/v3/repos/commits/
         """
-        access_API = self._access_API(['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head], params={'per_page': per_page})
+        access_API = self._access_API(
+            ['repos', self.github_repo_user, self.github_repo, 'compare', base + '...' + head],
+            params={'per_page': per_page})
         return access_API
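A hypothetical usage sketch of the two wrapped calls; the GitHub constructor arguments are an assumption (they are not visible in these hunks), but the endpoints and parameters are as shown above:

    gh = GitHub('SickGear', 'SickGear', 'master')  # hypothetical constructor arguments

    recent = gh.commits()                   # repos/SickGear/SickGear/commits?per_page=100&sha=master
    delta = gh.compare('abc123', 'def456')  # repos/SickGear/SickGear/compare/abc123...def456?per_page=1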
@@ -31,10 +31,10 @@ import httplib
 import urlparse
 import uuid
 import base64
+import string

 from lib import requests
 from itertools import izip, cycle
-from contextlib import closing

 try:
     import json

@@ -47,20 +47,17 @@ except ImportError:
     import elementtree.ElementTree as etree

 from xml.dom.minidom import Node
-from datetime import datetime as dt

 import sickbeard

 from sickbeard.exceptions import MultipleShowObjectsException, ex
 from sickbeard import logger, classes
-from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP, indexerStrings
+from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, XML_NSMAP

 from sickbeard import db
 from sickbeard import encodingKludge as ek
 from sickbeard import notifiers

-from sickbeard.indexers import indexer_api, indexer_exceptions
-
 from lib import subliminal
 #from sickbeard.subtitles import EXTENSIONS
@@ -88,6 +85,7 @@ def indentXML(elem, level=0):
     if level and (not elem.tail or not elem.tail.strip()):
         elem.tail = i

+
 def replaceExtension(filename, newExt):
     '''
     >>> replaceExtension('foo.avi', 'mkv')

@@ -107,6 +105,7 @@ def replaceExtension(filename, newExt):
     else:
         return sepFile[0] + "." + newExt

+
 def isMediaFile(filename):
     # ignore samples
     if re.search('(^|[\W_])(sample\d*)[\W_]', filename, re.I):

@@ -126,6 +125,7 @@ def isMediaFile(filename):
     else:
         return False

+
 def isRarFile(filename):
     archive_regex = '(?P<file>^(?P<base>(?:(?!\.part\d+\.rar$).)*)\.(?:(?:part0*1\.)?rar)$)'

@@ -134,14 +134,16 @@ def isRarFile(filename):

     return False

+
 def isBeingWritten(filepath):
     # Return True if file was modified within 60 seconds. it might still be being written to.
     ctime = max(ek.ek(os.path.getctime, filepath), ek.ek(os.path.getmtime, filepath))
     if ctime > time.time() - 60:
         return True

     return False

+
 def sanitizeFileName(name):
     '''
     >>> sanitizeFileName('a/b/c')
@@ -196,12 +198,14 @@ Returns a byte-string retrieved from the url provider.

     return resp.content if resp.ok else None

+
 def _remove_file_failed(file):
     try:
-        ek.ek(os.remove,file)
+        ek.ek(os.remove, file)
     except:
         pass

+
 def download_file(url, filename):
     try:
         r = requests.get(url, stream=True)

@@ -231,6 +235,7 @@ def download_file(url, filename):

     return True

+
 def findCertainShow(showList, indexerid):
     results = filter(lambda x: x.indexerid == indexerid, showList)
     if len(results) == 0:

@@ -240,6 +245,7 @@ def findCertainShow(showList, indexerid):
     else:
         return results[0]

+
 def makeDir(path):
     if not ek.ek(os.path.isdir, path):
         try:
@@ -252,8 +258,7 @@ def makeDir(path):


 def searchDBForShow(regShowName, indexer_id=None):
-
-    showNames = [re.sub('[. -]', ' ', regShowName),regShowName]
+    showNames = [re.sub('[. -]', ' ', regShowName), regShowName]

     myDB = db.DBConnection()

@@ -263,20 +268,25 @@ def searchDBForShow(regShowName, indexer_id=None):

         show = get_show_by_name(showName, sickbeard.showList)
         if show:
-            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?", [show.name, show.name])
+            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
+                                     [show.name, show.name])
         else:
-            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?", [showName, showName])
+            sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ? OR show_name LIKE ?",
+                                     [showName, showName])

         if len(sqlResults) == 1:
-            return (sqlResults[0]["indexer"], int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
+            return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])

         else:

             # if we didn't get exactly one result then try again with the year stripped off if possible
             match = re.match(yearRegex, showName)
             if match and match.group(1):
-                logger.log(u"Unable to match original name but trying to manually strip and specify show year", logger.DEBUG)
-                sqlResults = myDB.select("SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?", [match.group(1) + '%', match.group(1) + '%', match.group(3)])
+                logger.log(u"Unable to match original name but trying to manually strip and specify show year",
+                           logger.DEBUG)
+                sqlResults = myDB.select(
+                    "SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?",
+                    [match.group(1) + '%', match.group(1) + '%', match.group(3)])

             if len(sqlResults) == 0:
                 logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
@@ -285,49 +295,52 @@ def searchDBForShow(regShowName, indexer_id=None):
                 logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
                 continue
             else:
-                return (sqlResults[0]["indexer"], int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
+                return (int(sqlResults[0]["indexer"]), int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])

     return None

-def searchIndexersForShow(regShowName, indexer_id = None):
-
-    showNames = [re.sub('[. -]', ' ', regShowName),regShowName]
-
-    for name in showNames:
-        for indexer in indexerStrings:
-            logger.log(u"Trying to find the " + name + " on " + indexer, logger.DEBUG)
-
-            try:
-                lINDEXER_API_PARMS = {'indexer': indexer}
-
-                lINDEXER_API_PARMS['search_all_languages'] = True
-                lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
-
-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
-                showObj = t[name]
-                return indexer
-            except (indexer_exceptions.indexer_exception, IOError):
-                # if none found, search on all languages
-                try:
-                    # There's gotta be a better way of doing this but we don't wanna
-                    # change the language value elsewhere
-
-                    lINDEXER_API_PARMS = {'indexer': indexer}
-
-                    lINDEXER_API_PARMS['search_all_languages'] = True
-                    lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
-
-                    t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
-                    showObj = t[name]
-                    return indexer
-                except (indexer_exceptions.indexer_exception, IOError):
-                    pass
-
-                continue
-            except (IOError):
-                continue
-
-    return None
+
+def searchIndexersForShow(regShowName, indexer_id=None):
+    showNames = [re.sub('[. -]', ' ', regShowName), regShowName]
+
+    # Query Indexers for each search term and build the list of results
+    for indexer in sickbeard.indexerApi().indexers:
+        def searchShows():
+            lINDEXER_API_PARMS = {'indexer': indexer}
+            lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
+
+            for name in showNames:
+                logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(indexer).name, logger.DEBUG)
+
+                try:
+                    if indexer_id:
+                        search = t[indexer_id]
+                    else:
+                        search = t[name]
+
+                    if isinstance(search, dict):
+                        search = [search]
+
+                    # add search results
+                    result = [[t.config['id'], x['id']] for x in search if name.lower() == x['seriesname'].lower()]
+                    if len(result) > 0:
+                        result = [item for sublist in result for item in sublist]
+                        return result
+
+                except KeyError, e:
+                    break
+
+                except Exception, e:
+                    logger.log(
+                        u"Error while auto-detecting show indexer and indexerid on indexer " + sickbeard.indexerApi(
+                            indexer).name + ", retrying: " + ex(e), logger.ERROR)
+                    logger.log(traceback.format_exc(), logger.DEBUG)
+
+                continue
+
+        # search indexers for shows
+        found = searchShows()
+        if found: return found


 def sizeof_fmt(num):
     '''
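A minimal sketch of the flattening step in searchShows above: each match is an [indexer id, show id] pair, and the nested list is flattened before being returned (the values here are illustrative):

    result = [[1, 73739]]  # e.g. one match: indexer 1, seriesid 73739
    result = [item for sublist in result for item in sublist]
    print(result)  # [1, 73739]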
@@ -347,8 +360,8 @@ def sizeof_fmt(num):
             return "%3.1f %s" % (num, x)
         num /= 1024.0

-def listMediaFiles(path):
-
+
+def listMediaFiles(path):
     if not dir or not ek.ek(os.path.isdir, path):
         return []

@@ -365,6 +378,7 @@ def listMediaFiles(path):

     return files

+
 def copyFile(srcFile, destFile):
     ek.ek(shutil.copyfile, srcFile, destFile)
     try:

@@ -372,6 +386,7 @@ def copyFile(srcFile, destFile):
     except OSError:
         pass

+
 def moveFile(srcFile, destFile):
     try:
         ek.ek(os.rename, srcFile, destFile)

@@ -380,13 +395,16 @@ def moveFile(srcFile, destFile):
         copyFile(srcFile, destFile)
         ek.ek(os.unlink, srcFile)

+
 def link(src, dst):
     if os.name == 'nt':
         import ctypes
+
         if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
     else:
         os.link(src, dst)

+
 def hardlinkFile(srcFile, destFile):
     try:
         ek.ek(link, srcFile, destFile)
@@ -395,13 +413,17 @@ def hardlinkFile(srcFile, destFile):
         logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
         copyFile(srcFile, destFile)

+
 def symlink(src, dst):
     if os.name == 'nt':
         import ctypes
-        if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
+
+        if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0,
+                                                                                                                      1280]: raise ctypes.WinError()
     else:
         os.symlink(src, dst)

+
 def moveAndSymlinkFile(srcFile, destFile):
     try:
         ek.ek(os.rename, srcFile, destFile)

@@ -411,6 +433,7 @@ def moveAndSymlinkFile(srcFile, destFile):
         logger.log(u"Failed to create symlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
         copyFile(srcFile, destFile)

+
 def make_dirs(path):
     """
     Creates any folders that are missing and assigns them the permissions of their
@@ -483,7 +506,7 @@ def rename_ep_file(cur_path, new_path, old_path_length=0):

     #Check if the language extracted from filename is a valid language
     try:
         language = subliminal.language.Language(sublang, strict=True)
-        cur_file_ext = '.'+sublang+cur_file_ext
+        cur_file_ext = '.' + sublang + cur_file_ext
     except ValueError:
         pass
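A minimal sketch of the subtitle-extension handling above: when the chunk before the extension parses as a valid language, it is preserved in the renamed file's extension (values illustrative):

    sublang, cur_file_ext = 'en', '.srt'
    cur_file_ext = '.' + sublang + cur_file_ext
    print(cur_file_ext)  # .en.srt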
@@ -524,7 +547,8 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):

         check_files = ek.ek(os.listdir, check_empty_dir)

-        if not check_files or (len(check_files) <= len(ignore_items) and all([check_file in ignore_items for check_file in check_files])):
+        if not check_files or (len(check_files) <= len(ignore_items) and all(
+                [check_file in ignore_items for check_file in check_files])):
             # directory is empty or contains only ignore_items
             try:
                 logger.log(u"Deleting empty folder: " + check_empty_dir)

@@ -539,6 +563,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
         else:
             break

+
 def chmodAsParent(childPath):
     if os.name == 'nt' or os.name == 'ce':
         return
@@ -566,16 +591,18 @@ def chmodAsParent(childPath):
     childPath_owner = childPathStat.st_uid
     user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

-    if user_id !=0 and user_id != childPath_owner:
+    if user_id != 0 and user_id != childPath_owner:
         logger.log(u"Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
         return

     try:
         ek.ek(os.chmod, childPath, childMode)
-        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode), logger.DEBUG)
+        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode),
+                   logger.DEBUG)
     except OSError:
         logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)

+
 def fileBitFilter(mode):
     for bit in [stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH, stat.S_ISUID, stat.S_ISGID]:
         if mode & bit:

@@ -583,6 +610,7 @@ def fileBitFilter(mode):

     return mode

+
 def fixSetGroupID(childPath):
     if os.name == 'nt' or os.name == 'ce':
         return

@@ -602,17 +630,21 @@ def fixSetGroupID(childPath):
         childPath_owner = childStat.st_uid
         user_id = os.geteuid()  # @UndefinedVariable - only available on UNIX

-        if user_id !=0 and user_id != childPath_owner:
-            logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID", logger.DEBUG)
+        if user_id != 0 and user_id != childPath_owner:
+            logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID",
+                       logger.DEBUG)
             return

         try:
             ek.ek(os.chown, childPath, -1, parentGID)  # @UndefinedVariable - only available on UNIX
             logger.log(u"Respecting the set-group-ID bit on the parent directory for %s" % (childPath), logger.DEBUG)
         except OSError:
-            logger.log(u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (childPath, parentGID), logger.ERROR)
+            logger.log(
+                u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (
+                    childPath, parentGID), logger.ERROR)

-def sanitizeSceneName (name, ezrss=False):
+
+def sanitizeSceneName(name, ezrss=False):
     """
     Takes a show name and returns the "scenified" version of it.
@@ -640,13 +672,15 @@ def sanitizeSceneName(name, ezrss=False):

     return name

+
 def create_https_certificates(ssl_cert, ssl_key):
     """
     Create self-signed HTTPS certificares and store in paths 'ssl_cert' and 'ssl_key'
     """
     try:
         from lib.OpenSSL import crypto  # @UnresolvedImport
-        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial  # @UnresolvedImport
+        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, \
+            serial  # @UnresolvedImport
     except:
         logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
         return False

@@ -659,7 +693,7 @@ def create_https_certificates(ssl_cert, ssl_key):
     cname = 'SickBeard'
     pkey = createKeyPair(TYPE_RSA, 1024)
     req = createCertRequest(pkey, CN=cname)
-    cert = createCertificate(req, (cacert, cakey), serial, (0, 60* 60 * 24 * 365 *10))  # ten years
+    cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

     # Save the key and certificate to disk
     try:

@@ -671,8 +705,10 @@ def create_https_certificates(ssl_cert, ssl_key):

     return True

+
 if __name__ == '__main__':
     import doctest
+
     doctest.testmod()
@@ -771,14 +807,17 @@ def backupVersionedFile(old_file, version):


 # try to convert to int, if it fails the default will be returned
-def tryInt(s, s_default = 0):
-    try: return int(s)
-    except: return s_default
+def tryInt(s, s_default=0):
+    try:
+        return int(s)
+    except:
+        return s_default


 # generates a md5 hash of a file
-def md5_for_file(filename, block_size=2**16):
+def md5_for_file(filename, block_size=2 ** 16):
     try:
-        with open(filename,'rb') as f:
+        with open(filename, 'rb') as f:
             md5 = hashlib.md5()
             while True:
                 data = f.read(block_size)
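The restyled tryInt keeps its forgiving behaviour; a quick standalone check:

    def tryInt(s, s_default=0):
        try:
            return int(s)
        except:
            return s_default

    print(tryInt("42"))       # 42
    print(tryInt("4x2", -1))  # -1 - falls back to s_default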
@@ -790,6 +829,7 @@ def md5_for_file(filename, block_size=2**16):
     except Exception:
         return None

+
 def get_lan_ip():
     """
     Simple function to get LAN localhost_ip

@@ -827,6 +867,7 @@ def get_lan_ip():
         pass
     return ip

+
 def check_url(url):
     """
     Check if a URL exists without downloading the whole file.
@@ -860,27 +901,30 @@ To add a new encryption_version:
 """

 # Key Generators
-unique_key1 = hex(uuid.getnode()**2)  # Used in encryption v1
+unique_key1 = hex(uuid.getnode() ** 2)  # Used in encryption v1

+
 # Encryption Functions
 def encrypt(data, encryption_version=0, decrypt=False):

     # Version 1: Simple XOR encryption (this is not very secure, but works)
     if encryption_version == 1:
         if decrypt:
-            return ''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(base64.decodestring(data), cycle(unique_key1)))
+            return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
         else:
-            return base64.encodestring(''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(data, cycle(unique_key1)))).strip()
+            return base64.encodestring(
+                ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()
     # Version 0: Plain text
     else:
         return data

+
 def decrypt(data, encryption_version=0):
     return encrypt(data, encryption_version, decrypt=True)

+
 def full_sanitizeSceneName(name):
     return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()

+
 def _check_against_names(name, show):
     nameInQuestion = full_sanitizeSceneName(name)
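A minimal round-trip sketch of the version 1 XOR scheme above, with a fixed stand-in key (the real unique_key1 is derived from uuid.getnode(), so it varies per machine):

    import base64
    from itertools import izip, cycle

    unique_key1 = '0x30d42bc43f9L'  # stand-in; really hex(uuid.getnode() ** 2)

    def encrypt(data, decrypt=False):
        # XOR each byte against the cycled key, base64-wrapped for storage
        if decrypt:
            return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(base64.decodestring(data), cycle(unique_key1)))
        return base64.encodestring(''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(data, cycle(unique_key1)))).strip()

    token = encrypt('hunter2')
    print(encrypt(token, decrypt=True))  # hunter2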
@@ -895,25 +939,26 @@ def _check_against_names(name, show):

     return False

+
 def get_show_by_name(name, showList, useIndexer=False):
-    logger.log(u"Trying to get the indexerid for "+name, logger.DEBUG)
+    logger.log(u"Trying to get the indexerid for " + name, logger.DEBUG)

     if showList:
         for show in showList:
             if _check_against_names(name, show):
-                logger.log(u"Matched "+name+" in the showlist to the show "+show.name, logger.DEBUG)
+                logger.log(u"Matched " + name + " in the showlist to the show " + show.name, logger.DEBUG)
                 return show

     if useIndexer:
-        for indexer in indexerStrings:
+        for indexer in sickbeard.indexerApi().indexers:
             try:
                 lINDEXER_API_PARMS = {'indexer': indexer}

                 lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
                 showObj = t[name]
-            except (indexer_exceptions.indexer_exception, IOError):
+            except (sickbeard.indexer_exception, IOError):
                 # if none found, search on all languages
                 try:
                     lINDEXER_API_PARMS = {'indexer': indexer}

@@ -921,9 +966,9 @@ def get_show_by_name(name, showList, useIndexer=False):
                     lINDEXER_API_PARMS['search_all_languages'] = True
                     lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                    t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                    t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
                     showObj = t[name]
-                except (indexer_exceptions.indexer_exception, IOError):
+                except (sickbeard.indexer_exception, IOError):
                     pass

                 continue

@@ -936,12 +981,15 @@ def get_show_by_name(name, showList, useIndexer=False):

     return None

+
 def suffix(d):
-    return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
+    return 'th' if 11 <= d <= 13 else {1: 'st', 2: 'nd', 3: 'rd'}.get(d % 10, 'th')

+
 def custom_strftime(format, t):
     return t.strftime(format).replace('{S}', str(t.day) + suffix(t.day))

+
 def is_hidden_folder(folder):
     """
     Returns True if folder is hidden.

@@ -954,6 +1002,7 @@ def is_hidden_folder(folder):

     return False

+
 def real_path(path):
     """
     Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
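A quick standalone check of the restyled ordinal helper:

    def suffix(d):
        return 'th' if 11 <= d <= 13 else {1: 'st', 2: 'nd', 3: 'rd'}.get(d % 10, 'th')

    print(suffix(1) + ' ' + suffix(2) + ' ' + suffix(11) + ' ' + suffix(23))  # st nd th rd
    # custom_strftime('%B {S}', t) then renders dates like "August 23rd"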
@@ -24,20 +24,20 @@ from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality

 dateFormat = "%Y%m%d%H%M%S"

-def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
-
+
+def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
     logDate = datetime.datetime.today().strftime(dateFormat)

     if not isinstance(resource, unicode):
         resource = unicode(resource, 'utf-8')

     myDB = db.DBConnection()
-    myDB.action("INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
+    myDB.action(
+        "INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
         [action, logDate, showid, season, episode, quality, resource, provider])


 def logSnatch(searchResult):

     for curEpObj in searchResult.episodes:

         showid = int(curEpObj.show.indexerid)

@@ -57,8 +57,8 @@ def logSnatch(searchResult):

         _logHistoryItem(action, showid, season, episode, quality, resource, provider)

-def logDownload(episode, filename, new_ep_quality, release_group=None):
-
+
+def logDownload(episode, filename, new_ep_quality, release_group=None):
     showid = int(episode.show.indexerid)
     season = int(episode.season)
     epNum = int(episode.episode)

@@ -75,8 +75,8 @@ def logDownload(episode, filename, new_ep_quality, release_group=None):

     _logHistoryItem(action, showid, season, epNum, quality, filename, provider)

-def logSubtitle(showid, season, episode, status, subtitleResult):
-
+
+def logSubtitle(showid, season, episode, status, subtitleResult):
     resource = subtitleResult.path
     provider = subtitleResult.service
     status, quality = Quality.splitCompositeStatus(status)

@@ -84,8 +84,8 @@ def logSubtitle(showid, season, episode, status, subtitleResult):

     _logHistoryItem(action, showid, season, episode, quality, resource, provider)

-def logFailed(indexerid, season, episode, status, release, provider=None):
-
+
+def logFailed(indexerid, season, episode, status, release, provider=None):
     showid = int(indexerid)
     season = int(season)
     epNum = int(episode)
@@ -28,8 +28,8 @@ from sickbeard.metadata.generic import GenericMetadata
 from lib.hachoir_parser import createParser
 from lib.hachoir_metadata import extractMetadata

-class ImageCache:
-
+
+class ImageCache:
     def __init__(self):
         pass

@@ -94,7 +94,7 @@ class ImageCache:
         Returns true if a cached poster exists for the given indexer id
         """
         poster_path = self.poster_path(indexer_id)
-        logger.log(u"Checking if file "+str(poster_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(poster_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, poster_path)

     def has_banner(self, indexer_id):

@@ -102,7 +102,7 @@ class ImageCache:
         Returns true if a cached banner exists for the given indexer id
         """
         banner_path = self.banner_path(indexer_id)
-        logger.log(u"Checking if file "+str(banner_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(banner_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, banner_path)

     def has_poster_thumbnail(self, indexer_id):

@@ -110,7 +110,7 @@ class ImageCache:
         Returns true if a cached poster thumbnail exists for the given indexer id
         """
         poster_thumb_path = self.poster_thumb_path(indexer_id)
-        logger.log(u"Checking if file "+str(poster_thumb_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(poster_thumb_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, poster_thumb_path)

     def has_banner_thumbnail(self, indexer_id):

@@ -118,7 +118,7 @@ class ImageCache:
         Returns true if a cached banner exists for the given indexer id
         """
         banner_thumb_path = self.banner_thumb_path(indexer_id)
-        logger.log(u"Checking if file "+str(banner_thumb_path)+" exists", logger.DEBUG)
+        logger.log(u"Checking if file " + str(banner_thumb_path) + " exists", logger.DEBUG)
         return ek.ek(os.path.isfile, banner_thumb_path)


@@ -137,7 +137,7 @@ class ImageCache:
         """

         if not ek.ek(os.path.isfile, path):
-            logger.log(u"Couldn't check the type of "+str(path)+" cause it doesn't exist", logger.WARNING)
+            logger.log(u"Couldn't check the type of " + str(path) + " cause it doesn't exist", logger.WARNING)
             return None

         # use hachoir to parse the image for us

@@ -145,10 +145,10 @@ class ImageCache:
         img_metadata = extractMetadata(img_parser)

         if not img_metadata:
-            logger.log(u"Unable to get metadata from "+str(path)+", not using your existing image", logger.DEBUG)
+            logger.log(u"Unable to get metadata from " + str(path) + ", not using your existing image", logger.DEBUG)
             return None

-        img_ratio = float(img_metadata.get('width'))/float(img_metadata.get('height'))
+        img_ratio = float(img_metadata.get('width')) / float(img_metadata.get('height'))

         img_parser.stream._input.close()

@@ -160,7 +160,7 @@ class ImageCache:
         elif 5 < img_ratio < 6:
             return self.BANNER
         else:
-            logger.log(u"Image has size ratio of "+str(img_ratio)+", unknown type", logger.WARNING)
+            logger.log(u"Image has size ratio of " + str(img_ratio) + ", unknown type", logger.WARNING)
             return None

     def _cache_image_from_file(self, image_path, img_type, indexer_id):
@ -180,19 +180,19 @@ class ImageCache:
|
||||||
elif img_type == self.BANNER:
|
elif img_type == self.BANNER:
|
||||||
dest_path = self.banner_path(indexer_id)
|
dest_path = self.banner_path(indexer_id)
|
||||||
else:
|
else:
|
||||||
logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
|
logger.log(u"Invalid cache image type: " + str(img_type), logger.ERROR)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# make sure the cache folder exists before we try copying to it
|
# make sure the cache folder exists before we try copying to it
|
||||||
if not ek.ek(os.path.isdir, self._cache_dir()):
|
if not ek.ek(os.path.isdir, self._cache_dir()):
|
||||||
logger.log(u"Image cache dir didn't exist, creating it at "+str(self._cache_dir()))
|
logger.log(u"Image cache dir didn't exist, creating it at " + str(self._cache_dir()))
|
||||||
ek.ek(os.makedirs, self._cache_dir())
|
ek.ek(os.makedirs, self._cache_dir())
|
||||||
|
|
||||||
if not ek.ek(os.path.isdir, self._thumbnails_dir()):
|
if not ek.ek(os.path.isdir, self._thumbnails_dir()):
|
||||||
logger.log(u"Thumbnails cache dir didn't exist, creating it at "+str(self._thumbnails_dir()))
|
logger.log(u"Thumbnails cache dir didn't exist, creating it at " + str(self._thumbnails_dir()))
|
||||||
ek.ek(os.makedirs, self._thumbnails_dir())
|
ek.ek(os.makedirs, self._thumbnails_dir())
|
||||||
|
|
||||||
logger.log(u"Copying from "+image_path+" to "+dest_path)
|
logger.log(u"Copying from " + image_path + " to " + dest_path)
|
||||||
helpers.copyFile(image_path, dest_path)
|
helpers.copyFile(image_path, dest_path)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
@ -221,7 +221,7 @@ class ImageCache:
|
||||||
img_type_name = 'banner_thumb'
|
img_type_name = 'banner_thumb'
|
||||||
dest_path = self.banner_thumb_path(show_obj.indexerid)
|
dest_path = self.banner_thumb_path(show_obj.indexerid)
|
||||||
else:
|
else:
|
||||||
logger.log(u"Invalid cache image type: "+str(img_type), logger.ERROR)
|
logger.log(u"Invalid cache image type: " + str(img_type), logger.ERROR)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# retrieve the image from indexer using the generic metadata class
|
# retrieve the image from indexer using the generic metadata class
|
||||||
|
@ -240,7 +240,7 @@ class ImageCache:
|
||||||
show_obj: TVShow object to cache images for
|
show_obj: TVShow object to cache images for
|
||||||
"""
|
"""
|
||||||
|
|
||||||
logger.log(u"Checking if we need any cache images for show "+str(show_obj.indexerid), logger.DEBUG)
|
logger.log(u"Checking if we need any cache images for show " + str(show_obj.indexerid), logger.DEBUG)
|
||||||
|
|
||||||
# check if the images are already cached or not
|
# check if the images are already cached or not
|
||||||
need_images = {self.POSTER: not self.has_poster(show_obj.indexerid),
|
need_images = {self.POSTER: not self.has_poster(show_obj.indexerid),
|
||||||
|
@ -248,7 +248,8 @@ class ImageCache:
|
||||||
self.POSTER_THUMB: not self.has_poster_thumbnail(show_obj.indexerid),
|
self.POSTER_THUMB: not self.has_poster_thumbnail(show_obj.indexerid),
|
||||||
self.BANNER_THUMB: not self.has_banner_thumbnail(show_obj.indexerid)}
|
self.BANNER_THUMB: not self.has_banner_thumbnail(show_obj.indexerid)}
|
||||||
|
|
||||||
if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not need_images[self.BANNER_THUMB]:
|
if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not \
|
||||||
|
need_images[self.BANNER_THUMB]:
|
||||||
logger.log(u"No new cache images needed, not retrieving new ones")
|
logger.log(u"No new cache images needed, not retrieving new ones")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -256,19 +257,24 @@ class ImageCache:
|
||||||
if need_images[self.POSTER] or need_images[self.BANNER]:
|
if need_images[self.POSTER] or need_images[self.BANNER]:
|
||||||
try:
|
try:
|
||||||
for cur_provider in sickbeard.metadata_provider_dict.values():
|
for cur_provider in sickbeard.metadata_provider_dict.values():
|
||||||
logger.log(u"Checking if we can use the show image from the "+cur_provider.name+" metadata", logger.DEBUG)
|
logger.log(u"Checking if we can use the show image from the " + cur_provider.name + " metadata",
|
||||||
|
logger.DEBUG)
|
||||||
if ek.ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
|
if ek.ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
|
||||||
cur_file_name = os.path.abspath(cur_provider.get_poster_path(show_obj))
|
cur_file_name = os.path.abspath(cur_provider.get_poster_path(show_obj))
|
||||||
cur_file_type = self.which_type(cur_file_name)
|
cur_file_type = self.which_type(cur_file_name)
|
||||||
|
|
||||||
if cur_file_type == None:
|
if cur_file_type == None:
|
||||||
logger.log(u"Unable to retrieve image type, not using the image from "+str(cur_file_name), logger.WARNING)
|
logger.log(u"Unable to retrieve image type, not using the image from " + str(cur_file_name),
|
||||||
|
logger.WARNING)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
logger.log(u"Checking if image "+cur_file_name+" (type "+str(cur_file_type)+" needs metadata: "+str(need_images[cur_file_type]), logger.DEBUG)
|
logger.log(u"Checking if image " + cur_file_name + " (type " + str(
|
||||||
|
cur_file_type) + " needs metadata: " + str(need_images[cur_file_type]), logger.DEBUG)
|
||||||
|
|
||||||
if cur_file_type in need_images and need_images[cur_file_type]:
|
if cur_file_type in need_images and need_images[cur_file_type]:
|
||||||
logger.log(u"Found an image in the show dir that doesn't exist in the cache, caching it: "+cur_file_name+", type "+str(cur_file_type), logger.DEBUG)
|
logger.log(
|
||||||
|
u"Found an image in the show dir that doesn't exist in the cache, caching it: " + cur_file_name + ", type " + str(
|
||||||
|
cur_file_type), logger.DEBUG)
|
||||||
self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.indexerid)
|
self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.indexerid)
|
||||||
need_images[cur_file_type] = False
|
need_images[cur_file_type] = False
|
||||||
except exceptions.ShowDirNotFoundException:
|
except exceptions.ShowDirNotFoundException:
|
||||||
|
@ -276,9 +282,9 @@ class ImageCache:
|
||||||
|
|
||||||
# download from indexer for missing ones
|
# download from indexer for missing ones
|
||||||
for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB]:
|
for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB]:
|
||||||
logger.log(u"Seeing if we still need an image of type "+str(cur_image_type)+": "+str(need_images[cur_image_type]), logger.DEBUG)
|
logger.log(u"Seeing if we still need an image of type " + str(cur_image_type) + ": " + str(
|
||||||
|
need_images[cur_image_type]), logger.DEBUG)
|
||||||
if cur_image_type in need_images and need_images[cur_image_type]:
|
if cur_image_type in need_images and need_images[cur_image_type]:
|
||||||
self._cache_image_from_indexer(show_obj, cur_image_type)
|
self._cache_image_from_indexer(show_obj, cur_image_type)
|
||||||
|
|
||||||
|
|
||||||
logger.log(u"Done cache check")
|
logger.log(u"Done cache check")
|
||||||
|
|
|
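Aside from log-string spacing, the only behavioural logic touched above is the width/height ratio test in `which_type()`. A minimal standalone sketch of that classification (the banner bounds 5–6 come from the hunk above; the poster bounds here are an assumption for illustration, since that branch falls outside this excerpt):

# Sketch of ImageCache.which_type()'s ratio test. The BANNER bounds come
# from the hunk above; the POSTER bounds are assumed for illustration only.
POSTER = 1
BANNER = 2

def classify_image(width, height):
    img_ratio = float(width) / float(height)
    if 0.55 < img_ratio < 0.8:   # assumed: tall, poster-like image
        return POSTER
    elif 5 < img_ratio < 6:      # from the diff: wide banner image
        return BANNER
    else:
        return None              # unknown aspect ratio, caller skips the file

print classify_image(680, 1000)  # -> 1 (poster)
print classify_image(758, 140)   # -> 2 (banner)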
@@ -16,20 +16,4 @@
 # You should have received a copy of the GNU General Public License
 # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

-__all__ = ["generic","indexer_api","indexer_exceptions"]
-
-import indexer_api, indexer_exceptions
-
-
-def getClientModule(name):
-
-    name = name.lower()
-    prefix = "sickbeard.indexers."
-
-    return __import__(prefix+name, fromlist=__all__)
-
-
-def getClientIstance(name):
-
-    module = getClientModule(name)
-    className = module.__class__.__name__
-
-    return getattr(module, className)
+from . import indexer_api, indexer_exceptions
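The deleted `getClientModule()`/`getClientIstance()` helpers resolved indexer modules by name via `__import__`; after this change callers simply import the submodules directly. A sketch of the new calling convention (hypothetical caller):

# Hypothetical caller after this change: plain imports replace the old
# dynamic __import__-by-name lookup.
from sickbeard.indexers import indexer_api, indexer_exceptions

api = indexer_api.indexerApi()   # bare wrapper, no indexer selected yet
print api.indexers               # {1: 'theTVDB', 2: 'TVRage'}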
@@ -1,66 +0,0 @@
-# Author: Nic Wolfe <nic@wolfeden.ca>
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of Sick Beard.
-#
-# Sick Beard is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Sick Beard is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
-import os
-
-import sickbeard
-
-class GenericIndexer(object):
-    def __init__(self, indexer):
-
-        INDEXER_TVDB = 'Tvdb'
-        INDEXER_TVRAGE = 'TVRage'
-
-        INDEXERS = {}
-        INDEXERS[INDEXER_TVDB] = 'theTVDB'
-        INDEXERS[INDEXER_TVRAGE] = 'TVRage'
-
-        INDEXER_API_KEY = {}
-        INDEXER_API_KEY[INDEXER_TVDB] = '9DAF49C96CBF8DAC'
-        INDEXER_API_KEY[INDEXER_TVRAGE] = 'Uhewg1Rr0o62fvZvUIZt'
-
-        INDEXER_BASEURL = {}
-        INDEXER_BASEURL[INDEXER_TVDB] = 'http://thetvdb.com/api/' + INDEXER_API_KEY[INDEXER_TVDB] + '/series/'
-        INDEXER_BASEURL[INDEXER_TVRAGE] = 'http://tvrage.com/showinfo?key=' + INDEXER_API_KEY[INDEXER_TVRAGE] + 'sid='
-
-        INDEXER_API_PARMS = {}
-        INDEXER_API_PARMS[INDEXER_TVDB] = {'apikey': INDEXER_API_KEY[INDEXER_TVDB],
-                                           'language': 'en',
-                                           'useZip': True}
-
-        INDEXER_API_PARMS[INDEXER_TVRAGE] = {'apikey': INDEXER_API_KEY[INDEXER_TVRAGE],
-                                             'language': 'en'}
-
-        self.config = {}
-        self.config['valid_languages'] = [
-            "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr",
-            "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"]
-
-        self.config['langabbv_to_id'] = {'el': 20, 'en': 7, 'zh': 27,
-            'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
-            'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
-            'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30}
-
-        self.indexers = [x for x in INDEXERS]
-
-        if indexer in INDEXERS:
-            self.base_url = INDEXER_BASEURL[indexer]
-            self.api_parms = INDEXER_API_PARMS[indexer]
-            self.name = INDEXERS[indexer]
-
-        if sickbeard.CACHE_DIR:
-            self.cache = os.path.join(sickbeard.CACHE_DIR, indexer)
@@ -16,27 +16,31 @@
 # You should have received a copy of the GNU General Public License
 # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
 import os
-import datetime

 import sickbeard
-import generic

-from indexer_exceptions import indexer_attributenotfound
-from lib.tvdb_api.tvdb_api import Tvdb
-from lib.tvrage_api.tvrage_api import TVRage
+from indexer_config import initConfig, indexerConfig

-class indexerApi(generic.GenericIndexer):
+
+class indexerApi(object):
     def __init__(self, indexer=None, *args, **kwargs):
-        generic.GenericIndexer.__init__(self, indexer)
+        self._wrapped = object
+        self.config = initConfig
+        self.indexers = {k: v if k is 'id' else v['name'] for k, v in indexerConfig.items()}

-        if indexer in self.indexers:
-            self.api_parms.update(**kwargs)
+        if indexer in indexerConfig:
+            self.name = indexerConfig[indexer]['name']
+            self.config = indexerConfig[indexer]

-            if sickbeard.CACHE_DIR:
-                self.api_parms['cache'] = self.cache
+            # set cache if exists
+            if sickbeard.CACHE_DIR: indexerConfig[indexer]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR,
+                                                                                                 self.name)
+
+            if kwargs:
+                # update API params
+                indexerConfig[indexer]['api_params'].update(**kwargs)

             # wrap the indexer API object and return it back
-            self._wrapped = eval(indexer)(*args, **self.api_parms)
+            self._wrapped = indexerConfig[indexer]['module'](**indexerConfig[indexer]['api_params'])

     def __getattr__(self, attr):
         return getattr(self._wrapped, attr)
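Usage of the rewritten wrapper, for orientation (a sketch; the numeric id constant comes from the new config module below, and network access plus the bundled tvdb_api are assumed):

import sickbeard
from sickbeard.indexers.indexer_config import INDEXER_TVDB

# Select an indexer by its numeric id; extra kwargs merge into api_params.
t = sickbeard.indexerApi(INDEXER_TVDB, language='en')
print t.name                 # 'theTVDB'
print t.config['show_url']   # 'http://thetvdb.com/?tab=series&id='
# Show lookups are then written as t[81189], exactly as the metadata
# classes below do; attribute access falls through __getattr__ to Tvdb.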
sickbeard/indexers/indexer_config.py — new file, 51 lines
@@ -0,0 +1,51 @@
+from lib.tvdb_api.tvdb_api import Tvdb
+from lib.tvrage_api.tvrage_api import TVRage
+
+INDEXER_TVDB = 1
+INDEXER_TVRAGE = 2
+
+initConfig = {}
+indexerConfig = {}
+
+initConfig['valid_languages'] = [
+    "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr",
+    "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"]
+
+initConfig['langabbv_to_id'] = {
+    'el': 20, 'en': 7, 'zh': 27,
+    'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
+    'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
+    'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30}
+
+indexerConfig[INDEXER_TVDB] = {
+    'id': INDEXER_TVDB,
+    'name': 'theTVDB',
+    'module': Tvdb,
+    'api_params': {'apikey': '9DAF49C96CBF8DAC',
+                   'language': 'en',
+                   'useZip': True
+                   },
+}
+
+indexerConfig[INDEXER_TVRAGE] = {
+    'id': INDEXER_TVRAGE,
+    'name': 'TVRage',
+    'module': TVRage,
+    'api_params': {'apikey': 'Uhewg1Rr0o62fvZvUIZt',
+                   'language': 'en'
+                   },
+}
+
+# TVDB Indexer Settings
+indexerConfig[INDEXER_TVDB]['xem_origin'] = 'tvdb'
+indexerConfig[INDEXER_TVDB]['icon'] = 'thetvdb16.png'
+indexerConfig[INDEXER_TVDB]['scene_url'] = 'http://midgetspy.github.com/sb_tvdb_scene_exceptions/exceptions.txt'
+indexerConfig[INDEXER_TVDB]['show_url'] = 'http://thetvdb.com/?tab=series&id='
+indexerConfig[INDEXER_TVDB]['base_url'] = 'http://thetvdb.com/api/%(apikey)s/series/' % indexerConfig[INDEXER_TVDB]['api_params']
+
+# TVRAGE Indexer Settings
+indexerConfig[INDEXER_TVRAGE]['xem_origin'] = 'rage'
+indexerConfig[INDEXER_TVRAGE]['icon'] = 'tvrage16.png'
+indexerConfig[INDEXER_TVRAGE]['scene_url'] = 'http://raw.github.com/echel0n/sb_tvrage_scene_exceptions/master/exceptions.txt'
+indexerConfig[INDEXER_TVRAGE]['show_url'] = 'http://tvrage.com/shows/id-'
+indexerConfig[INDEXER_TVRAGE]['base_url'] = 'http://tvrage.com/showinfo.php?key=%(apikey)s&sid=' % indexerConfig[INDEXER_TVRAGE]['api_params']
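With per-indexer settings collected in one registry, code that must handle every indexer can iterate the dict instead of branching on Tvdb/TVRage. A sketch:

# Sketch: iterate the registry instead of hard-coding per-indexer branches.
from sickbeard.indexers.indexer_config import indexerConfig

for indexer_id, cfg in indexerConfig.items():
    print indexer_id, cfg['name'], cfg['show_url']
    # cfg['module'] is the API class itself, so construction is uniform:
    #     api = cfg['module'](**cfg['api_params'])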
@@ -19,13 +19,13 @@ from lib.tvdb_api.tvdb_exceptions import \
     tvdb_seasonnotfound, tvdb_shownotfound, tvdb_userabort

 indexerExcepts = ["indexer_exception", "indexer_error", "indexer_userabort", "indexer_shownotfound",
                   "indexer_seasonnotfound", "indexer_episodenotfound", "indexer_attributenotfound"]

 tvdbExcepts = ["tvdb_exception", "tvdb_error", "tvdb_userabort", "tvdb_shownotfound",
                "tvdb_seasonnotfound", "tvdb_episodenotfound", "tvdb_attributenotfound"]

 tvrageExcepts = ["tvdb_exception", "tvrage_error", "tvrage_userabort", "tvrage_shownotfound",
                  "tvrage_seasonnotfound", "tvrage_episodenotfound", "tvrage_attributenotfound"]

 # link API exceptions to our exception handler
 indexer_exception = tvdb_exception, tvrage_exception
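Since `indexer_exception` is bound to the tuple `(tvdb_exception, tvrage_exception)`, a single `except` clause covers both backends. A sketch (the show id is borrowed from the old test below; network access is assumed):

import sickbeard
from sickbeard.indexers.indexer_config import INDEXER_TVDB
from sickbeard.indexers.indexer_exceptions import indexer_exception

t = sickbeard.indexerApi(INDEXER_TVDB)
try:
    show = t[81189]              # id borrowed from the old test file below
except indexer_exception, e:     # catches tvdb_* and tvrage_* errors alike
    print "indexer lookup failed: %s" % e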
@@ -3,33 +3,99 @@ from __future__ import with_statement
 import unittest

 import sys
+import datetime
 import os.path
+import string

 sys.path.append(os.path.abspath('..'))
 sys.path.append(os.path.abspath('../../../lib'))

-from sickbeard.indexers.indexer_api import indexerApi
-from sickbeard.indexers.indexer_exceptions import indexer_exception
+import sickbeard
+import itertools
+
+from itertools import chain
+from sickbeard import classes
+

 class APICheck(unittest.TestCase):
-    indexer_id = 81189
-    indexer = 'Tvdb'
-    lang = "en"
-
-    # Set our common indexer_api options here
-    INDEXER_API_PARMS = {'indexer': indexer}
-    lindexer_api_parms = INDEXER_API_PARMS.copy()
-
-    try:
-        lang_id = indexerApi().config['langabbv_to_id'][lang]
-        t = indexerApi(cache=True, **lindexer_api_parms)
-        myEp = t[indexer_id]
-
-        if getattr(myEp, 'seriesname', None) is not None:
-            print "FOUND"
-
-    except indexer_exception as e:
-        print e
-        pass
+    indexer = u'3'
+
+    for i in int([indexer]) and sickbeard.indexerApi().indexers:
+        print i
+
+    global indexer, keywords, nameUTF8
+
+    indexer = 0
+    name = 'american dad'
+    lang = "en"
+
+    if not lang or lang == 'null':
+        lang = "en"
+
+    results = []
+
+    nameUTF8 = name.encode('utf-8')
+
+    # Use each word in the show's name as a possible search term
+    keywords = nameUTF8.split(' ')
+
+    # Insert the whole show's name as the first search term so best results are first
+    # ex: keywords = ['Some Show Name', 'Some', 'Show', 'Name']
+    if len(keywords) > 1:
+        keywords.insert(0, nameUTF8)
+
+    # check for indexer preset
+    indexers = [int(indexer)]
+    if 0 in indexers:
+        indexers = sickbeard.indexerApi().indexers
+
+    # Query Indexers for each search term and build the list of results
+    for i in indexers:
+        def searchShows(i):
+            results = []
+
+            lINDEXER_API_PARMS = {'indexer': i}
+            lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
+
+            for searchTerm in keywords:
+                try:
+                    search = t[searchTerm]
+                    if isinstance(search, dict):
+                        search = [search]
+
+                    # add search results
+                    result = [
+                        [t.name, t.config['id'], t.config["show_url"], int(x['id']), x['seriesname'], x['firstaired']]
+                        for x in search if nameUTF8.lower() in x['seriesname'].lower()]
+
+                    # see if we have any matches
+                    if len(result) > 0:
+                        # add result to the list of found shows
+                        results += result
+
+                        # search through result to see if we have an exact match
+                        for show in result:
+                            # clean up the series name
+                            seriesname = show[4].encode('utf-8').translate(None, string.punctuation)
+
+                            # check if we got an exact match
+                            if nameUTF8.lower() == seriesname.lower():
+                                return results
+
+                except Exception, e:
+                    continue
+
+            # finished searching an indexer, so return the results
+            return results
+
+        # search indexers for shows
+        results += searchShows(i)
+
+    # remove duplicates
+    results = list(results for results, _ in itertools.groupby(results))
+    print results
+

 if __name__ == "__main__":
     unittest.main()
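The exact-match check above relies on a Python 2 `str.translate` idiom: passing `None` as the translation table deletes the characters named in the second argument. In isolation:

# Python 2 idiom used above: translate(None, deletechars) strips the
# given characters, here punctuation, before comparing show names.
import string

name = 'american dad!'
print name.translate(None, string.punctuation)  # -> 'american dad'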
@@ -25,6 +25,7 @@ import sqlite3

 import sys
 import os.path

 sys.path.append(os.path.abspath('..'))
 sys.path.append(os.path.abspath('../lib'))

@@ -43,7 +44,6 @@ TESTDIR = os.path.abspath('.')
 TESTDBNAME = "sickbeard.db"
 TESTCACHEDBNAME = "cache.db"

 SHOWNAME = u"show name"
 SEASON = 4
 EPISODE = 2
@@ -78,9 +78,9 @@ sickbeard.NAMING_PATTERN = ''
 sickbeard.NAMING_ABD_PATTERN = ''
 sickbeard.NAMING_MULTI_EP = 1

 sickbeard.PROVIDER_ORDER = ["sick_beard_index"]
-sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0!!!NZBs.org|http://nzbs.org/||5030,5040,5070,5090|0!!!Usenet-Crawler|http://www.usenet-crawler.com/||5030,5040|0")
+sickbeard.newznabProviderList = providers.getNewznabProviderList(
+    "Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0!!!NZBs.org|http://nzbs.org/||5030,5040,5070,5090|0!!!Usenet-Crawler|http://www.usenet-crawler.com/||5030,5040|0")
 sickbeard.providerList = providers.makeProviderList()

 sickbeard.PROG_DIR = os.path.abspath('..')
@@ -95,6 +95,7 @@ sickbeard.logger.sb_log_instance.initLogging(False)
 #=================
 def _dummy_saveConfig():
     return True

 # this overrides the sickbeard save_config which gets called during a db upgrade
 # this might be considered a hack
 mainDB.sickbeard.save_config = _dummy_saveConfig
@@ -104,6 +105,7 @@ mainDB.sickbeard.save_config = _dummy_saveConfig
 def _fake_specifyEP(self, season, episode):
     pass

 sickbeard.tv.TVEpisode.specifyEpisode = _fake_specifyEP

@@ -125,14 +127,12 @@ class SickbeardTestDBCase(unittest.TestCase):

 class TestDBConnection(db.DBConnection, object):

     def __init__(self, dbFileName=TESTDBNAME):
         dbFileName = os.path.join(TESTDIR, dbFileName)
         super(TestDBConnection, self).__init__(dbFileName)

 class TestCacheDBConnection(TestDBConnection, object):

     def __init__(self, providerName):
         db.DBConnection.__init__(self, os.path.join(TESTDIR, TESTCACHEDBNAME))

@@ -210,6 +210,7 @@ def setUp_test_show_dir():
 def tearDown_test_show_dir():
     shutil.rmtree(SHOWDIR)

     tearDown_test_db()

 if __name__ == '__main__':
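The `_dummy_saveConfig` and `_fake_specifyEP` assignments above are plain attribute monkey-patches, swapping a stub in before the tests run. The same pattern in miniature (hypothetical names):

# Miniature of the monkey-patch pattern above: replace a collaborator
# with a stub so tests never touch real state.
class Config(object):
    def save(self):
        raise IOError("a real save would hit the filesystem")

def _dummy_save(self):
    return True

Config.save = _dummy_save          # patch, like mainDB.sickbeard.save_config
assert Config().save() is True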
@@ -45,11 +45,10 @@ reverseNames = {u'ERROR': ERROR,
                 u'WARNING': WARNING,
                 u'INFO': MESSAGE,
                 u'DEBUG': DEBUG,
-                u'DB' : DB}
+                u'DB': DB}


 class SBRotatingLogHandler(object):

     def __init__(self, log_file, num_files, num_bytes):
         self.num_files = num_files
         self.num_bytes = num_bytes
@@ -92,7 +91,7 @@ class SBRotatingLogHandler(object):
         else:

             #Add a new logging level DB
-            logging.addLevelName(5,'DB')
+            logging.addLevelName(5, 'DB')

             # only start consoleLogging on first initialize
             if self.console_logging:
@@ -102,11 +101,13 @@ class SBRotatingLogHandler(object):
                 console.setLevel(logging.INFO)

                 # set a format which is simpler for console use
-                console.setFormatter(DispatchingFormatter({'sickbeard' : logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
-                                                           'subliminal' : logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s', '%H:%M:%S'),
-                                                           'imdbpy' : logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S')
+                console.setFormatter(DispatchingFormatter(
+                    {'sickbeard': logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'),
+                     'subliminal': logging.Formatter('%(asctime)s %(levelname)s::SUBLIMINAL :: %(message)s',
+                                                     '%H:%M:%S'),
+                     'imdbpy': logging.Formatter('%(asctime)s %(levelname)s::IMDBPY :: %(message)s', '%H:%M:%S')
                      },
-                    logging.Formatter('%(message)s'),))
+                    logging.Formatter('%(message)s'), ))

                 # add the handler to the root logger
                 logging.getLogger('sickbeard').addHandler(console)
@@ -127,14 +128,14 @@ class SBRotatingLogHandler(object):
             # already logging in new log folder, close the old handler
             if old_handler:
                 self.close_log(old_handler)
                 # old_handler.flush()
                 # old_handler.close()
                 # sb_logger = logging.getLogger('sickbeard')
                 # sub_logger = logging.getLogger('subliminal')
                 # imdb_logger = logging.getLogger('imdbpy')
                 # sb_logger.removeHandler(old_handler)
                 # subli_logger.removeHandler(old_handler)
                 # imdb_logger.removeHandler(old_handler)

     def _config_handler(self):
         """
@@ -143,11 +144,13 @@ class SBRotatingLogHandler(object):

         file_handler = logging.FileHandler(self.log_file_path, encoding='utf-8')
         file_handler.setLevel(DB)
-        file_handler.setFormatter(DispatchingFormatter({'sickbeard' : logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
-                                                        'subliminal' : logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s', '%Y-%m-%d %H:%M:%S'),
-                                                        'imdbpy' : logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S')
+        file_handler.setFormatter(DispatchingFormatter(
+            {'sickbeard': logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%Y-%m-%d %H:%M:%S'),
+             'subliminal': logging.Formatter('%(asctime)s %(levelname)-8s SUBLIMINAL :: %(message)s',
+                                             '%Y-%m-%d %H:%M:%S'),
+             'imdbpy': logging.Formatter('%(asctime)s %(levelname)-8s IMDBPY :: %(message)s', '%Y-%m-%d %H:%M:%S')
             },
-            logging.Formatter('%(message)s'),))
+            logging.Formatter('%(message)s'), ))

         return file_handler

@@ -254,7 +257,6 @@ class SBRotatingLogHandler(object):


 class DispatchingFormatter:

     def __init__(self, formatters, default_formatter):
         self._formatters = formatters
         self._default_formatter = default_formatter
@@ -266,11 +268,14 @@ class DispatchingFormatter:

 sb_log_instance = SBRotatingLogHandler('sickbeard.log', NUM_LOGS, LOG_SIZE)


 def log(toLog, logLevel=MESSAGE):
     sb_log_instance.log(toLog, logLevel)


 def log_error_and_exit(error_msg):
     sb_log_instance.log_error_and_exit(error_msg)


 def close():
     sb_log_instance.close_log()
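The reflowed `DispatchingFormatter(...)` calls read more easily once the class's shape is clear: a dict of per-logger-name formatters plus a default. A minimal sketch of that dispatch idea (the real class lives in this file; routing by `record.name` is an assumption inferred from its constructor):

import logging

class MiniDispatchingFormatter(object):
    def __init__(self, formatters, default_formatter):
        self._formatters = formatters
        self._default_formatter = default_formatter

    def format(self, record):
        # pick the formatter registered for this logger, else the default
        fmt = self._formatters.get(record.name, self._default_formatter)
        return fmt.format(record)

console = logging.StreamHandler()
console.setFormatter(MiniDispatchingFormatter(
    {'sickbeard': logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S')},
    logging.Formatter('%(message)s')))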
@@ -21,19 +21,21 @@ __all__ = ['generic', 'helpers', 'xbmc', 'xbmc_12plus', 'mediabrowser', 'ps3', '
 import sys
 import xbmc, xbmc_12plus, mediabrowser, ps3, wdtv, tivo


 def available_generators():
     return filter(lambda x: x not in ('generic', 'helpers'), __all__)


 def _getMetadataModule(name):
     name = name.lower()
     prefix = "sickbeard.metadata."
-    if name in __all__ and prefix+name in sys.modules:
-        return sys.modules[prefix+name]
+    if name in __all__ and prefix + name in sys.modules:
+        return sys.modules[prefix + name]
     else:
         return None

-def _getMetadataClass(name):

+
+def _getMetadataClass(name):
     module = _getMetadataModule(name)

     if not module:
@@ -41,6 +43,7 @@ def _getMetadataClass(name):

     return module.metadata_class()


 def get_metadata_generator_dict():
     result = {}
     for cur_generator_id in available_generators():
@@ -34,7 +34,7 @@ from sickbeard.exceptions import ex
 from sickbeard.show_name_helpers import allPossibleShowNames

 from lib.tmdb_api.tmdb_api import TMDB
-from sickbeard.indexers import indexer_api, indexer_exceptions


 class GenericMetadata():
     """
@@ -88,7 +88,9 @@ class GenericMetadata():
         self.season_all_banner = season_all_banner

     def get_config(self):
-        config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner, self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster, self.season_all_banner]
+        config_list = [self.show_metadata, self.episode_metadata, self.fanart, self.poster, self.banner,
+                       self.episode_thumbnails, self.season_posters, self.season_banners, self.season_all_poster,
+                       self.season_all_banner]
         return '|'.join([str(int(x)) for x in config_list])

     def get_id(self):
@@ -161,12 +163,14 @@ class GenericMetadata():

     def _has_season_all_poster(self, show_obj):
         result = ek.ek(os.path.isfile, self.get_season_all_poster_path(show_obj))
-        logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result), logger.DEBUG)
+        logger.log(u"Checking if " + self.get_season_all_poster_path(show_obj) + " exists: " + str(result),
+                   logger.DEBUG)
         return result

     def _has_season_all_banner(self, show_obj):
         result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj))
-        logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG)
+        logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result),
+                   logger.DEBUG)
         return result

     def get_show_file_path(self, show_obj):
@@ -264,7 +268,8 @@ class GenericMetadata():

     def create_episode_metadata(self, ep_obj, force=False):
         if self.episode_metadata and ep_obj and (not self._has_episode_metadata(ep_obj) or force):
-            logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(), logger.DEBUG)
+            logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(),
+                       logger.DEBUG)
             return self.write_ep_file(ep_obj)
         return False

@@ -288,7 +293,8 @@ class GenericMetadata():

     def create_episode_thumb(self, ep_obj):
         if self.episode_thumbnails and ep_obj and not self._has_episode_thumb(ep_obj):
-            logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.prettyName(), logger.DEBUG)
+            logger.log(u"Metadata provider " + self.name + " creating episode thumbnail for " + ep_obj.prettyName(),
+                       logger.DEBUG)
             return self.save_thumbnail(ep_obj)
         return False

@@ -297,7 +303,8 @@ class GenericMetadata():
         result = []
         for season, episodes in show_obj.episodes.iteritems():  # @UnusedVariable
             if not self._has_season_poster(show_obj, season):
-                logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name, logger.DEBUG)
+                logger.log(u"Metadata provider " + self.name + " creating season posters for " + show_obj.name,
+                           logger.DEBUG)
                 result = result + [self.save_season_posters(show_obj, season)]
         return all(result)
         return False
@@ -307,20 +314,23 @@ class GenericMetadata():
         result = []
         for season, episodes in show_obj.episodes.iteritems():  # @UnusedVariable
             if not self._has_season_banner(show_obj, season):
-                logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name, logger.DEBUG)
+                logger.log(u"Metadata provider " + self.name + " creating season banners for " + show_obj.name,
+                           logger.DEBUG)
                 result = result + [self.save_season_banners(show_obj, season)]
         return all(result)
         return False

     def create_season_all_poster(self, show_obj):
         if self.season_all_poster and show_obj and not self._has_season_all_poster(show_obj):
-            logger.log(u"Metadata provider " + self.name + " creating season all poster for " + show_obj.name, logger.DEBUG)
+            logger.log(u"Metadata provider " + self.name + " creating season all poster for " + show_obj.name,
+                       logger.DEBUG)
             return self.save_season_all_poster(show_obj)
         return False

     def create_season_all_banner(self, show_obj):
         if self.season_all_banner and show_obj and not self._has_season_all_banner(show_obj):
-            logger.log(u"Metadata provider " + self.name + " creating season all banner for " + show_obj.name, logger.DEBUG)
+            logger.log(u"Metadata provider " + self.name + " creating season all banner for " + show_obj.name,
+                       logger.DEBUG)
             return self.save_season_all_banner(show_obj)
         return False

@@ -349,21 +359,24 @@ class GenericMetadata():
             if ep_obj.show.dvdorder != 0:
                 lINDEXER_API_PARMS['dvdorder'] = True

-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

             indexer_show_obj = t[ep_obj.show.indexerid]
-        except indexer_exceptions.indexer_shownotfound, e:
+        except sickbeard.indexer_shownotfound, e:
             raise exceptions.ShowNotFoundException(e.message)
-        except indexer_exceptions.indexer_error, e:
-            logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
+        except sickbeard.indexer_error, e:
+            logger.log(u"Unable to connect to " + sickbeard.indexerApi(
+                ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
             return None

         # try all included episodes in case some have thumbs and others don't
         for cur_ep in all_eps:
             try:
                 myEp = indexer_show_obj[cur_ep.season][cur_ep.episode]
-            except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
-                logger.log(u"Unable to find episode " + str(cur_ep.season) + "x" + str(cur_ep.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?")
+            except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
+                logger.log(u"Unable to find episode " + str(cur_ep.season) + "x" + str(
+                    cur_ep.episode) + " on " + sickbeard.indexerApi(
+                    ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?")
                 continue

             thumb_url = getattr(myEp, 'filename', None)
@@ -410,7 +423,8 @@ class GenericMetadata():
             nfo_file.close()
             helpers.chmodAsParent(nfo_file_path)
         except IOError, e:
-            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR)
+            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
+                       logger.ERROR)
             return False

         return True
@@ -454,7 +468,8 @@ class GenericMetadata():
             nfo_file.close()
             helpers.chmodAsParent(nfo_file_path)
         except IOError, e:
-            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR)
+            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
+                       logger.ERROR)
             return False

         return True
@@ -580,7 +595,8 @@ class GenericMetadata():
             season_poster_file_path = self.get_season_poster_path(show_obj, cur_season)

             if not season_poster_file_path:
-                logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", logger.DEBUG)
+                logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season",
+                           logger.DEBUG)
                 continue

             seasonData = metadata_helpers.getShowImage(season_url)
@@ -627,7 +643,8 @@ class GenericMetadata():
             season_banner_file_path = self.get_season_banner_path(show_obj, cur_season)

             if not season_banner_file_path:
-                logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season", logger.DEBUG)
+                logger.log(u"Path for season " + str(cur_season) + " came back blank, skipping this season",
+                           logger.DEBUG)
                 continue

             seasonData = metadata_helpers.getShowImage(season_url)
@@ -699,7 +716,9 @@ class GenericMetadata():
             outFile.close()
             helpers.chmodAsParent(image_path)
         except IOError, e:
-            logger.log(u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e), logger.ERROR)
+            logger.log(
+                u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
+                logger.ERROR)
             return False

         return True
@@ -730,14 +749,16 @@ class GenericMetadata():
             if show_obj.dvdorder != 0:
                 lINDEXER_API_PARMS['dvdorder'] = True

-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
             indexer_show_obj = t[show_obj.indexerid]
-        except (indexer_exceptions.indexer_error, IOError), e:
-            logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR)
+        except (sickbeard.indexer_error, IOError), e:
+            logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
+                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
             return None

         if image_type not in ('fanart', 'poster', 'banner', 'poster_thumb', 'banner_thumb'):
-            logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + show_obj.indexer + " object", logger.ERROR)
+            logger.log(u"Invalid image type " + str(image_type) + ", couldn't find it in the " + sickbeard.indexerApi(
+                show_obj.indexer).name + " object", logger.ERROR)
             return None

         if image_type == 'poster_thumb':
@@ -793,10 +814,11 @@ class GenericMetadata():
             if show_obj.dvdorder != 0:
                 lINDEXER_API_PARMS['dvdorder'] = True

-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
             indexer_show_obj = t[show_obj.indexerid]
-        except (indexer_exceptions.indexer_error, IOError), e:
-            logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR)
+        except (sickbeard.indexer_error, IOError), e:
+            logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
+                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
             return result

         # if we have no season banners then just finish
@@ -845,10 +867,11 @@ class GenericMetadata():
             if indexer_lang and not indexer_lang == 'en':
                 lINDEXER_API_PARMS['language'] = indexer_lang

-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
             indexer_show_obj = t[show_obj.indexerid]
-        except (indexer_exceptions.indexer_error, IOError), e:
-            logger.log(u"Unable to look up show on " + show_obj.indexer + ", not downloading images: " + ex(e), logger.ERROR)
+        except (sickbeard.indexer_error, IOError), e:
+            logger.log(u"Unable to look up show on " + sickbeard.indexerApi(
+                show_obj.indexer).name + ", not downloading images: " + ex(e), logger.ERROR)
             return result

         # if we have no season banners then just finish
@@ -890,8 +913,10 @@ class GenericMetadata():
             with ek.ek(open, metadata_path, 'r') as xmlFileObj:
                 showXML = etree.ElementTree(file=xmlFileObj)

-            if showXML.findtext('title') == None\
-                    or (showXML.findtext('tvdbid') == None and showXML.findtext('id') == None):
+            if showXML.findtext('title') == None \
+                    or (showXML.findtext('tvdbid') == None
+                        and showXML.findtext('id') == None) \
+                    and showXML.findtext('indexer') == None:
                 logger.log(u"Invalid info in tvshow.nfo (missing name or id):" \
                     + str(showXML.findtext('title')) + " " \
                     + str(showXML.findtext('indexer')) + " " \
@@ -900,7 +925,12 @@ class GenericMetadata():
                 return empty_return

             name = showXML.findtext('title')
-            indexer = showXML.findtext('indexer')
+
+            try:
+                indexer = int(showXML.findtext('indexer'))
+            except:
+                indexer = None

             if showXML.findtext('tvdbid') != None:
                 indexer_id = int(showXML.findtext('tvdbid'))
             elif showXML.findtext('id') != None:
@@ -914,7 +944,9 @@ class GenericMetadata():
                 return empty_return

         except Exception, e:
-            logger.log(u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e), logger.WARNING)
+            logger.log(
+                u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
+                logger.WARNING)
             return empty_return

         return (indexer_id, name, indexer)
@@ -931,6 +963,7 @@ class GenericMetadata():

         def size_str_to_int(x):
             return float("inf") if x == 'original' else int(x[1:])

         max_size = max(sizes, key=size_str_to_int)

         try:
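The new tvshow.nfo handling tolerates a missing or non-numeric <indexer> element by degrading to `None`. The pattern in isolation:

# Sketch of the tolerant parsing added above: findtext() returns None for
# a missing element, int(None) raises, and the except leaves indexer=None.
import xml.etree.cElementTree as etree
from StringIO import StringIO

showXML = etree.ElementTree(file=StringIO('<tvshow><title>x</title><tvdbid>81189</tvdbid></tvshow>'))

try:
    indexer = int(showXML.findtext('indexer'))
except (TypeError, ValueError):
    indexer = None

indexer_id = int(showXML.findtext('tvdbid'))
print indexer, indexer_id  # -> None 81189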
@@ -21,7 +21,6 @@ from sickbeard import logger


 def getShowImage(url, imgNum=None):

     image_data = None  # @UnusedVariable

     if url == None:
@ -27,7 +27,6 @@ import generic
|
||||||
from sickbeard import logger, exceptions, helpers
|
from sickbeard import logger, exceptions, helpers
|
||||||
from sickbeard import encodingKludge as ek
|
from sickbeard import encodingKludge as ek
|
||||||
|
|
||||||
from sickbeard.indexers import indexer_api, indexer_exceptions
|
|
||||||
from sickbeard.exceptions import ex
|
from sickbeard.exceptions import ex
|
||||||
|
|
||||||
import xml.etree.cElementTree as etree
|
import xml.etree.cElementTree as etree
|
||||||
|
@ -145,7 +144,8 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
||||||
If no season folder exists, None is returned
|
If no season folder exists, None is returned
|
||||||
"""
|
"""
|
||||||
|
|
||||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
|
||||||
|
ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||||
|
|
||||||
season_dir_regex = '^Season\s+(\d+)$'
|
season_dir_regex = '^Season\s+(\d+)$'
|
||||||
|
|
||||||
|
@ -184,7 +184,8 @@ class MediaBrowserMetadata(generic.GenericMetadata):
|
||||||
If no season folder exists, None is returned
|
If no season folder exists, None is returned
|
||||||
"""
|
"""
|
||||||
|
|
||||||
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
|
||||||
|
ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
|
||||||
|
|
||||||
season_dir_regex = '^Season\s+(\d+)$'
|
season_dir_regex = '^Season\s+(\d+)$'
|
||||||
|
|
||||||
|
@@ -237,32 +238,36 @@ class MediaBrowserMetadata(generic.GenericMetadata):
         if show_obj.dvdorder != 0:
             lINDEXER_API_PARMS['dvdorder'] = True

-        t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+        t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

         tv_node = etree.Element("Series")

         try:
             myShow = t[int(show_obj.indexerid)]
-        except indexer_exceptions.indexer_shownotfound:
-            logger.log(u"Unable to find show with id " + str(show_obj.indexerid) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
+        except sickbeard.indexer_shownotfound:
+            logger.log(u"Unable to find show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi(
+                show_obj.indexer).name + ", skipping it", logger.ERROR)
             raise

-        except indexer_exceptions.indexer_error:
-            logger.log(u"" + show_obj.indexer + " is down, can't use its data to make the NFO", logger.ERROR)
+        except sickbeard.indexer_error:
+            logger.log(
+                u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to make the NFO",
+                logger.ERROR)
             raise

         # check for title and id
         if getattr(myShow, 'seriesname', None) is None or getattr(myShow, 'id', None) is None:
-            logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
+            logger.log(u"Incomplete info for show with id " + str(show_obj.indexerid) + " on " + sickbeard.indexerApi(
+                show_obj.indexer).name + ", skipping it", logger.ERROR)
             return False

         indexerid = etree.SubElement(tv_node, "id")
         if getattr(myShow, 'id', None) is not None:
-            indexerid.text = myShow['id']
+            indexerid.text = str(myShow['id'])

         indexer = etree.SubElement(tv_node, "indexer")
         if show_obj.indexer != None:
-            indexer.text = show_obj.indexer
+            indexer.text = str(show_obj.indexer)

         SeriesName = etree.SubElement(tv_node, "SeriesName")
         if getattr(myShow, 'seriesname', None) is not None:
@@ -400,13 +405,14 @@ class MediaBrowserMetadata(generic.GenericMetadata):
             if ep_obj.show.dvdorder != 0:
                 lINDEXER_API_PARMS['dvdorder'] = True

-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

             myShow = t[ep_obj.show.indexerid]
-        except indexer_exceptions.indexer_shownotfound, e:
+        except sickbeard.indexer_shownotfound, e:
             raise exceptions.ShowNotFoundException(e.message)
-        except indexer_exceptions.indexer_error, e:
-            logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
+        except sickbeard.indexer_error, e:
+            logger.log(u"Unable to connect to " + sickbeard.indexerApi(
+                ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
             return False

         rootNode = etree.Element("Item")
@@ -416,8 +422,10 @@ class MediaBrowserMetadata(generic.GenericMetadata):

             try:
                 myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
-            except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
-                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?")
+            except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
+                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
+                    curEpToWrite.episode) + " on " + sickbeard.indexerApi(
+                    ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?")
                 return None

             if curEpToWrite == ep_obj:
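All of the metadata classes in this commit move from the removed `indexer_api` / `indexer_exceptions` modules to the `sickbeard.indexerApi` wrapper, so log lines and URLs come from per-indexer config instead of the raw `show_obj.indexer` value. A rough stand-in for the shape of that wrapper (the class body and config values here are illustrative assumptions, not the project's actual implementation):

    # illustrative stand-in for sickbeard.indexerApi (assumed shape)
    class indexerApi(object):
        CONFIG = {1: {'name': 'theTVDB', 'base_url': 'http://thetvdb.com/api/',
                      'show_url': 'http://thetvdb.com/?tab=series&id='},
                  2: {'name': 'TVRage', 'base_url': 'http://tvrage.com/',
                      'show_url': 'http://tvrage.com/shows/id-'}}

        def __init__(self, indexer=None, **api_params):
            self.indexer = indexer        # numeric indexer id
            self.api_params = api_params  # dvdorder, custom_ui, search_all_languages, ...

        @property
        def name(self):
            return self.CONFIG[self.indexer]['name']

        @property
        def config(self):
            return self.CONFIG[self.indexer]

        @property
        def indexers(self):
            return sorted(self.CONFIG.keys())

    # callers stop hard-coding "thetvdb.com" and ask the wrapper instead:
    t = indexerApi(indexer=1, dvdorder=True)
    print t.name                # theTVDB
    print t.config['base_url']  # http://thetvdb.com/api/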
@@ -30,9 +30,6 @@ from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex

-
-from sickbeard.indexers import indexer_api, indexer_exceptions
-

 class TIVOMetadata(generic.GenericMetadata):
     """
     Metadata generation class for TIVO
@@ -179,20 +176,23 @@ class TIVOMetadata(generic.GenericMetadata):
             if ep_obj.show.dvdorder != 0:
                 lINDEXER_API_PARMS['dvdorder'] = True

-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
             myShow = t[ep_obj.show.indexerid]
-        except indexer_exceptions.indexer_shownotfound, e:
+        except sickbeard.indexer_shownotfound, e:
             raise exceptions.ShowNotFoundException(str(e))
-        except indexer_exceptions.indexer_error, e:
-            logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + str(e), logger.ERROR)
+        except sickbeard.indexer_error, e:
+            logger.log(u"Unable to connect to " + sickbeard.indexerApi(
+                ep_obj.show.indexer).name + " while creating meta files - skipping - " + str(e), logger.ERROR)
             return False

         for curEpToWrite in eps_to_write:

             try:
                 myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
-            except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
-                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + "... has it been removed? Should I delete from db?")
+            except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
+                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
+                    curEpToWrite.episode) + " on " + sickbeard.indexerApi(
+                    ep_obj.show.indexer).name + "... has it been removed? Should I delete from db?")
                 return None

             if getattr(myEp, 'firstaired', None) is None and ep_obj.season == 0:
@@ -230,7 +230,8 @@ class TIVOMetadata(generic.GenericMetadata):
             # Replace double curly quotes
             sanitizedDescription = sanitizedDescription.replace(u"\u201c", "\"").replace(u"\u201d", "\"")
             # Replace single curly quotes
-            sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u02BC", "'")
+            sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(
+                u"\u02BC", "'")

             data += ("description : " + sanitizedDescription + "\n")

@@ -324,7 +325,8 @@ class TIVOMetadata(generic.GenericMetadata):
             helpers.chmodAsParent(nfo_file_path)

         except EnvironmentError, e:
-            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR)
+            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
+                       logger.ERROR)
             return False

         return True
@@ -27,7 +27,6 @@ import generic
 from sickbeard import logger, exceptions, helpers
 from sickbeard import encodingKludge as ek

-from sickbeard.indexers import indexer_api, indexer_exceptions
 from sickbeard.exceptions import ex

 import xml.etree.cElementTree as etree
@@ -135,7 +134,8 @@ class WDTVMetadata(generic.GenericMetadata):
         If no season folder exists, None is returned
         """

-        dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]
+        dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
+                    ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]

         season_dir_regex = '^Season\s+(\d+)$'

@@ -187,12 +187,13 @@ class WDTVMetadata(generic.GenericMetadata):
             if ep_obj.show.dvdorder != 0:
                 lINDEXER_API_PARMS['dvdorder'] = True

-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
             myShow = t[ep_obj.show.indexerid]
-        except indexer_exceptions.indexer_shownotfound, e:
+        except sickbeard.indexer_shownotfound, e:
             raise exceptions.ShowNotFoundException(e.message)
-        except indexer_exceptions.indexer_error, e:
-            logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
+        except sickbeard.indexer_error, e:
+            logger.log(u"Unable to connect to " + sickbeard.indexerApi(
+                ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
             return False

         rootNode = etree.Element("details")
@@ -202,8 +203,10 @@ class WDTVMetadata(generic.GenericMetadata):

             try:
                 myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
-            except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
-                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + "... has it been removed? Should I delete from db?")
+            except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
+                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
+                    curEpToWrite.episode) + " on " + sickbeard.indexerApi(
+                    ep_obj.show.indexer).name + "... has it been removed? Should I delete from db?")
                 return None

             if getattr(myEp, 'firstaired', None) is None and ep_obj.season == 0:
@@ -20,8 +20,6 @@ import datetime

 import sickbeard

-from sickbeard.indexers import indexer_api, indexer_exceptions
-
 from sickbeard import logger, exceptions, helpers
 from sickbeard.exceptions import ex

@@ -109,23 +107,27 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
         if show_obj.dvdorder != 0:
             lINDEXER_API_PARMS['dvdorder'] = True

-        t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+        t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

         tv_node = etree.Element("tvshow")

         try:
             myShow = t[int(show_ID)]
-        except indexer_exceptions.indexer_shownotfound:
-            logger.log(u"Unable to find show with id " + str(show_ID) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
+        except sickbeard.indexer_shownotfound:
+            logger.log(u"Unable to find show with id " + str(show_ID) + " on " + sickbeard.indexerApi(
+                show_obj.indexer).name + ", skipping it", logger.ERROR)
             raise

-        except indexer_exceptions.indexer_error:
-            logger.log(u"" + show_obj.indexer + " is down, can't use its data to add this show", logger.ERROR)
+        except sickbeard.indexer_error:
+            logger.log(
+                u"" + sickbeard.indexerApi(show_obj.indexer).name + " is down, can't use its data to add this show",
+                logger.ERROR)
             raise

         # check for title and id
         if getattr(myShow, 'seriesname', None) is None or getattr(myShow, 'id', None) is None:
-            logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + show_obj.indexer + ", skipping it", logger.ERROR)
+            logger.log(u"Incomplete info for show with id " + str(show_ID) + " on " + sickbeard.indexerApi(
+                show_obj.indexer).name + ", skipping it", logger.ERROR)
             return False

         title = etree.SubElement(tv_node, "title")
@@ -153,7 +155,7 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
         episodeguideurl = etree.SubElement(episodeguide, "url")
         episodeguideurl2 = etree.SubElement(tv_node, "episodeguideurl")
         if getattr(myShow, 'id', None) is not None:
-            showurl = t.base_url + myShow["id"] + '/all/en.zip'
+            showurl = sickbeard.indexerApi(show_obj.indexer).config['base_url'] + str(myShow["id"]) + '/all/en.zip'
             episodeguideurl.text = showurl
             episodeguideurl2.text = showurl

@@ -163,11 +165,11 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):

         indexerid = etree.SubElement(tv_node, "id")
         if getattr(myShow, 'id', None) is not None:
-            indexerid.text = myShow["id"]
+            indexerid.text = str(myShow["id"])

         indexer = etree.SubElement(tv_node, "indexer")
         if show_obj.indexer is not None:
-            indexer.text = show_obj.indexer
+            indexer.text = str(show_obj.indexer)

         genre = etree.SubElement(tv_node, "genre")
         if getattr(myShow, 'genre', None) is not None:
@@ -230,12 +232,13 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
             lINDEXER_API_PARMS['dvdorder'] = True

         try:
-            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)
             myShow = t[ep_obj.show.indexerid]
-        except indexer_exceptions.indexer_shownotfound, e:
+        except sickbeard.indexer_shownotfound, e:
             raise exceptions.ShowNotFoundException(e.message)
-        except indexer_exceptions.indexer_error, e:
-            logger.log(u"Unable to connect to " + ep_obj.show.indexer + " while creating meta files - skipping - " + ex(e), logger.ERROR)
+        except sickbeard.indexer_error, e:
+            logger.log(u"Unable to connect to " + sickbeard.indexerApi(
+                ep_obj.show.indexer).name + " while creating meta files - skipping - " + ex(e), logger.ERROR)
             return

         if len(eps_to_write) > 1:
@@ -248,8 +251,10 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):

             try:
                 myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
-            except (indexer_exceptions.indexer_episodenotfound, indexer_exceptions.indexer_seasonnotfound):
-                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on " + ep_obj.show.indexer + ".. has it been removed? Should I delete from db?")
+            except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound):
+                logger.log(u"Unable to find episode " + str(curEpToWrite.season) + "x" + str(
+                    curEpToWrite.episode) + " on " + sickbeard.indexerApi(
+                    ep_obj.show.indexer).name + ".. has it been removed? Should I delete from db?")
                 return None

             if getattr(myEp, 'firstaired', None) is None:
@@ -19,6 +19,7 @@
 from sickbeard import db
 from sickbeard.helpers import sanitizeSceneName

+
 def addNameToCache(name, indexer_id):
     """
     Adds the show & tvdb id to the scene_names table in cache.db.
@@ -36,6 +37,7 @@ def addNameToCache(name, indexer_id):
     cacheDB = db.DBConnection('cache.db')
     cacheDB.action("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)", [indexer_id, name])

+
 def retrieveNameFromCache(name):
     """
     Looks up the given name in the scene_names table in cache.db.
@@ -56,6 +58,7 @@ def retrieveNameFromCache(name):

     return int(cache_results[0]["indexer_id"])

+
 def clearCache():
     """
     Deletes all "unknown" entries from the cache (names with indexer_id of 0).
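The name-cache hunks above are spacing-only, but they show the whole cache surface: a single `scene_names` table keyed by show name. A self-contained sketch of the same insert/lookup flow against an in-memory SQLite database (table and column names are taken from the diff; the rest is assumed):

    import sqlite3

    # in-memory stand-in for cache.db; the real code first sanitizes the
    # name with sickbeard.helpers.sanitizeSceneName()
    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT)")

    def add_name_to_cache(name, indexer_id):
        conn.execute("INSERT INTO scene_names (indexer_id, name) VALUES (?, ?)",
                     [indexer_id, name])

    def retrieve_name_from_cache(name):
        row = conn.execute("SELECT indexer_id FROM scene_names WHERE name = ?",
                           [name]).fetchone()
        return int(row[0]) if row is not None else None

    add_name_to_cache('Show Name', 12345)
    print retrieve_name_from_cache('Show Name')  # 12345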
@@ -26,13 +26,12 @@ import calendar

 from sickbeard import logger, classes
 from sickbeard import scene_numbering, scene_exceptions
-from sickbeard.indexers import indexer_api, indexer_exceptions
-from sickbeard.common import indexerStrings

 from lib.dateutil.parser import parse

 from time import strptime

+
 class NameParser(object):
     def __init__(self, file_name=True):

@@ -96,21 +95,21 @@ class NameParser(object):

             if 'season_num' in named_groups:
                 tmp_season = int(match.group('season_num'))
-                if cur_regex_name == 'bare' and tmp_season in (19,20):
+                if cur_regex_name == 'bare' and tmp_season in (19, 20):
                     continue
                 result.season_number = tmp_season

             if 'ep_num' in named_groups:
                 ep_num = self._convert_number(match.group('ep_num'))
                 if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
-                    result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num'))+1)
+                    result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
                 else:
                     result.episode_numbers = [ep_num]

             if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                 if 'scene_sports_date_format' in cur_regex_name:
                     year = match.group('air_year')
-                    month = strptime(match.group('air_month')[:3],'%b').tm_mon
+                    month = strptime(match.group('air_month')[:3], '%b').tm_mon
                     day = re.sub("(st|nd|rd|th)", "", match.group('air_day'))
                 else:
                     year = int(match.group('air_year'))
@@ -127,7 +126,8 @@ class NameParser(object):
                 tmp_extra_info = match.group('extra_info')

                 # Show.S04.Special is almost certainly not every episode in the season
-                if tmp_extra_info and cur_regex_name == 'season_only' and re.match(r'([. _-]|^)(special|extra)\w*([. _-]|$)', tmp_extra_info, re.I):
+                if tmp_extra_info and cur_regex_name == 'season_only' and re.match(
+                        r'([. _-]|^)(special|extra)\w*([. _-]|$)', tmp_extra_info, re.I):
                     continue
                 result.extra_info = tmp_extra_info

@@ -160,7 +160,7 @@ class NameParser(object):
         else:
             return b

-    def _unicodify(self, obj, encoding = "utf-8"):
+    def _unicodify(self, obj, encoding="utf-8"):
         if isinstance(obj, basestring):
             if not isinstance(obj, unicode):
                 obj = unicode(obj, encoding)
@@ -255,7 +255,8 @@ class NameParser(object):
         return final_result

     @classmethod
-    def series_name_to_indexer_id(cls, series_name, check_scene_exceptions=True, check_database=True, check_indexer=False):
+    def series_name_to_indexer_id(cls, series_name, check_scene_exceptions=True, check_database=True,
+                                  check_indexer=False):
         """
         Given a series name, return it's tvdbd_id.
         Returns None if not found.
@@ -269,31 +270,32 @@ class NameParser(object):
         # for each possible interpretation of that scene name
         if check_scene_exceptions:
             for cur_name in name_list:
-                logger.log(u"Checking scene exceptions for a match on "+cur_name, logger.DEBUG)
+                logger.log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG)
                 scene_id = sickbeard.scene_exceptions.get_scene_exception_by_name(cur_name)
                 if scene_id: return scene_id

         # see if we can find the name directly in the DB, if so use it
         if check_database:
             for cur_name in name_list:
-                logger.log(u"Looking up "+cur_name+u" in the DB", logger.DEBUG)
+                logger.log(u"Looking up " + str(cur_name) + " in the DB", logger.DEBUG)
                 db_result = sickbeard.helpers.searchDBForShow(cur_name)
                 if db_result: return db_result[1]

         # see if we can find the name with a TVDB lookup
         if check_indexer:
             for cur_name in name_list:
-                for indexer in indexerStrings:
+                for indexer in sickbeard.indexerApi().indexers:
                     try:
                         lINDEXER_API_PARMS = {'indexer': indexer}

                         lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                        t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                        t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

-                        logger.log(u"Looking up name "+cur_name+u" on the Indexer", logger.DEBUG)
+                        logger.log(u"Looking up name " + str(cur_name) + " on " + sickbeard.indexerApi(indexer).name,
+                                   logger.DEBUG)
                         showObj = t[cur_name]
-                    except (indexer_exceptions):
+                    except (sickbeard.indexer_exception):
                         # if none found, search on all languages
                         try:
                             lINDEXER_API_PARMS = {'indexer': indexer}
@@ -301,11 +303,13 @@ class NameParser(object):
                             lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
                             lINDEXER_API_PARMS['search_all_languages'] = True

-                            t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                            t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

-                            logger.log(u"Looking up name "+cur_name+u" in all languages on the Indexer", logger.DEBUG)
+                            logger.log(
+                                u"Looking up name " + str(cur_name) + " in all languages on " + sickbeard.indexerApi(
+                                    indexer).name, logger.DEBUG)
                             showObj = t[cur_name]
-                        except (indexer_exceptions.indexer_exception, IOError):
+                        except (sickbeard.indexer_exception, IOError):
                             pass

                         continue
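With `indexerStrings` gone, `series_name_to_indexer_id()` above walks `sickbeard.indexerApi().indexers` instead, but the lookup cascade is unchanged: scene exceptions, then the local DB, then each indexer in turn (retrying across all languages on a miss). A condensed, runnable sketch of that control flow with the three lookups stubbed out:

    # stubbed lookups; the real code calls sickbeard.scene_exceptions,
    # sickbeard.helpers.searchDBForShow and sickbeard.indexerApi
    def get_scene_exception_by_name(name):
        return None

    def search_db_for_show(name):
        return None

    def indexer_lookup(indexer, name):
        return None

    def series_name_to_indexer_id(name_list, indexers):
        # 1) scene exception table
        for cur_name in name_list:
            scene_id = get_scene_exception_by_name(cur_name)
            if scene_id:
                return scene_id
        # 2) local show database
        for cur_name in name_list:
            db_result = search_db_for_show(cur_name)
            if db_result:
                return db_result[1]
        # 3) every configured indexer, e.g. sickbeard.indexerApi().indexers
        for cur_name in name_list:
            for indexer in indexers:
                show = indexer_lookup(indexer, cur_name)
                if show is not None:
                    return show['id']
        return None

    print series_name_to_indexer_id(['Show Name'], [1, 2])  # None with these stubs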
@@ -316,6 +320,7 @@ class NameParser(object):

         return None

+
 class ParseResult(object):
     def __init__(self,
                  original_name,
@@ -368,10 +373,10 @@ class ParseResult(object):
         else:
             to_return = u''
         if self.season_number != None:
-            to_return += 'S'+str(self.season_number)
+            to_return += 'S' + str(self.season_number)
         if self.episode_numbers and len(self.episode_numbers):
             for e in self.episode_numbers:
-                to_return += 'E'+str(e)
+                to_return += 'E' + str(e)

         if self.air_by_date:
             to_return += str(self.air_date)
@@ -381,7 +386,7 @@ class ParseResult(object):
         if self.release_group:
             to_return += ' (' + self.release_group + ')'

-        to_return += ' [ABD: '+str(self.air_by_date)+']'
+        to_return += ' [ABD: ' + str(self.air_by_date) + ']'

         return to_return.encode('utf-8')

@@ -389,6 +394,7 @@ class ParseResult(object):
         if self.season_number == None and len(self.episode_numbers) == 0 and self.air_date:
             return True
         return False

     air_by_date = property(_is_air_by_date)
+
     def fix_scene_numbering(self):
@@ -429,6 +435,7 @@ class ParseResult(object):

         return self

+
 class NameParserCache(object):
     #TODO: check if the fifo list can beskiped and only use one dict
     _previous_parsed_list = [] # keep a fifo list of the cached items
@@ -449,7 +456,9 @@ class NameParserCache(object):
         else:
             return None

+
 name_parser_cache = NameParserCache()

+
 class InvalidNameException(Exception):
     "The given name is not valid"
@@ -196,4 +196,4 @@ ep_regexes = [
                      -(?P<release_group>[^- ]+))?)?$ # Group
                      '''
                      ),
 ]
@@ -33,12 +33,13 @@ name_presets = ('%SN - %Sx%0E - %EN',
                 '%Sx%0E - %EN',
                 'S%0SE%0E - %EN',
                 'Season %0S/%S.N.S%0SE%0E.%Q.N-%RG'
                 )

 name_abd_presets = ('%SN - %A-D - %EN',
                     '%S.N.%A.D.%E.N.%Q.N',
                     '%Y/%0M/%S.N.%A.D.%E.N-%RG'
                     )

+
 class TVShow():
     def __init__(self):
@@ -46,6 +47,7 @@ class TVShow():
         self.genre = "Comedy"
         self.air_by_date = 0

+
 class TVEpisode(tv.TVEpisode):
     def __init__(self, season, episode, name):
         self.relatedEps = []
@@ -58,6 +60,7 @@ class TVEpisode(tv.TVEpisode):
         self._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
         self._is_proper = True

+
 def check_force_season_folders(pattern=None, multi=None):
     """
     Checks if the name can still be parsed if you strip off the folders to determine if we need to force season folders
@@ -75,6 +78,7 @@ def check_force_season_folders(pattern=None, multi=None):

     return valid

+
 def check_valid_naming(pattern=None, multi=None):
     """
     Checks if the name is can be parsed back to its original form for both single and multi episodes.
@@ -84,15 +88,16 @@ def check_valid_naming(pattern=None, multi=None):
     if pattern == None:
         pattern = sickbeard.NAMING_PATTERN

-    logger.log(u"Checking whether the pattern "+pattern+" is valid for a single episode", logger.DEBUG)
+    logger.log(u"Checking whether the pattern " + pattern + " is valid for a single episode", logger.DEBUG)
     valid = validate_name(pattern, None)

     if multi != None:
-        logger.log(u"Checking whether the pattern "+pattern+" is valid for a multi episode", logger.DEBUG)
+        logger.log(u"Checking whether the pattern " + pattern + " is valid for a multi episode", logger.DEBUG)
         valid = valid and validate_name(pattern, multi)

     return valid

+
 def check_valid_abd_naming(pattern=None):
     """
     Checks if the name is can be parsed back to its original form for an air-by-date format.
@@ -102,7 +107,7 @@ def check_valid_abd_naming(pattern=None):
     if pattern == None:
         pattern = sickbeard.NAMING_PATTERN

-    logger.log(u"Checking whether the pattern "+pattern+" is valid for an air-by-date episode", logger.DEBUG)
+    logger.log(u"Checking whether the pattern " + pattern + " is valid for an air-by-date episode", logger.DEBUG)
     valid = validate_name(pattern, abd=True)

     return valid
@@ -119,18 +124,18 @@ def validate_name(pattern, multi=None, file_only=False, abd=False):
         new_name = ek.ek(os.path.join, new_path, new_name)

     if not new_name:
-        logger.log(u"Unable to create a name out of "+pattern, logger.DEBUG)
+        logger.log(u"Unable to create a name out of " + pattern, logger.DEBUG)
         return False

-    logger.log(u"Trying to parse "+new_name, logger.DEBUG)
+    logger.log(u"Trying to parse " + new_name, logger.DEBUG)

     try:
         result = parser.parse(new_name)
     except InvalidNameException:
-        logger.log(u"Unable to parse "+new_name+", not valid", logger.DEBUG)
+        logger.log(u"Unable to parse " + new_name + ", not valid", logger.DEBUG)
         return False

-    logger.log("The name "+new_name + " parsed into " + str(result), logger.DEBUG)
+    logger.log("The name " + new_name + " parsed into " + str(result), logger.DEBUG)

     if abd:
         if result.air_date != ep.airdate:
@@ -146,9 +151,10 @@ def validate_name(pattern, multi=None, file_only=False, abd=False):

     return True

+
 def _generate_sample_ep(multi=None, abd=False):
     # make a fake episode object
-    ep = TVEpisode(2,3,"Ep Name")
+    ep = TVEpisode(2, 3, "Ep Name")
     ep._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
     ep._airdate = datetime.date(2011, 3, 9)
     if abd:
@@ -160,11 +166,11 @@ def _generate_sample_ep(multi=None, abd=False):
         ep._name = "Ep Name (1)"
         ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'

-        secondEp = TVEpisode(2,4,"Ep Name (2)")
+        secondEp = TVEpisode(2, 4, "Ep Name (2)")
         secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
         secondEp._release_name = ep._release_name

-        thirdEp = TVEpisode(2,5,"Ep Name (3)")
+        thirdEp = TVEpisode(2, 5, "Ep Name (3)")
         thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
         thirdEp._release_name = ep._release_name

@@ -173,8 +179,8 @@ def _generate_sample_ep(multi=None, abd=False):

     return ep

-def test_name(pattern, multi=None, abd=False):

+def test_name(pattern, multi=None, abd=False):
     ep = _generate_sample_ep(multi, abd)

     return {'name': ep.formatted_filename(pattern, multi), 'dir': ep.formatted_dir(pattern, multi)}
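`validate_name()` above is a round-trip check: render a fake episode with the naming pattern, parse the result back, and require the parse to match the fake episode. A toy stand-in for that round trip (the real code goes through `TVEpisode.formatted_filename()` and `NameParser.parse()`; these two helpers are simplified assumptions):

    import re

    def render(pattern, season, episode):
        # stand-in for TVEpisode.formatted_filename()
        return (pattern.replace('%0S', '%02d' % season)
                       .replace('%0E', '%02d' % episode))

    def parse(name):
        # stand-in for NameParser.parse(); returns (season, episode) or None
        m = re.search(r'S(\d+)E(\d+)', name)
        return (int(m.group(1)), int(m.group(2))) if m else None

    def validate_name(pattern):
        # render a sample episode, parse it back, require a match
        return parse(render(pattern, 2, 3)) == (2, 3)

    print validate_name('Show.Name.S%0SE%0E')  # True
    print validate_name('Show.Name.%0Ex%0S')   # False - cannot be parsed back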
@@ -39,10 +39,11 @@ sb_timezone = tz.tzlocal()

 # helper to remove failed temp download
 def _remove_zoneinfo_failed(filename):
     try:
-        ek.ek(os.remove,filename)
+        ek.ek(os.remove, filename)
     except:
         pass

+
 # helper to remove old unneeded zoneinfo files
 def _remove_old_zoneinfo():
     if (lib.dateutil.zoneinfo.ZONEINFOFILE is not None):
@@ -50,22 +51,23 @@ def _remove_old_zoneinfo():
     else:
         return

-    cur_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
+    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))

-    for (path, dirs, files) in ek.ek(os.walk,helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
+    for (path, dirs, files) in ek.ek(os.walk,
+                                     helpers.real_path(ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
         for filename in files:
             if filename.endswith('.tar.gz'):
-                file_w_path = ek.ek(join,path,filename)
-                if file_w_path != cur_file and ek.ek(isfile,file_w_path):
+                file_w_path = ek.ek(join, path, filename)
+                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                     try:
-                        ek.ek(os.remove,file_w_path)
+                        ek.ek(os.remove, file_w_path)
                         logger.log(u"Delete unneeded old zoneinfo File: " + file_w_path)
                     except:
-                        logger.log(u"Unable to delete: " + file_w_path,logger.ERROR)
+                        logger.log(u"Unable to delete: " + file_w_path, logger.ERROR)


 # update the dateutil zoneinfo
 def _update_zoneinfo():

     global sb_timezone
     sb_timezone = tz.tzlocal()

@@ -91,35 +93,36 @@ def _update_zoneinfo():
     # now load the new zoneinfo
     url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo

-    zonefile = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
-    zonefile_tmp = re.sub(r"\.tar\.gz$",'.tmp', zonefile)
+    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
+    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)

-    if (ek.ek(os.path.exists,zonefile_tmp)):
+    if (ek.ek(os.path.exists, zonefile_tmp)):
         try:
-            ek.ek(os.remove,zonefile_tmp)
+            ek.ek(os.remove, zonefile_tmp)
         except:
-            logger.log(u"Unable to delete: " + zonefile_tmp,logger.ERROR)
+            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
             return

     if not helpers.download_file(url_tar, zonefile_tmp):
         return

-    if not ek.ek(os.path.exists,zonefile_tmp):
-        logger.log(u"Download of " + zonefile_tmp + " failed.",logger.ERROR)
+    if not ek.ek(os.path.exists, zonefile_tmp):
+        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
         return

     new_hash = str(helpers.md5_for_file(zonefile_tmp))

     if (zoneinfo_md5.upper() == new_hash.upper()):
-        logger.log(u"Updating timezone info with new one: " + new_zoneinfo,logger.MESSAGE)
+        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
         try:
             # remove the old zoneinfo file
             if (cur_zoneinfo is not None):
-                old_file = helpers.real_path(ek.ek(join,ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
-                if (ek.ek(os.path.exists,old_file)):
-                    ek.ek(os.remove,old_file)
+                old_file = helpers.real_path(
+                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
+                if (ek.ek(os.path.exists, old_file)):
+                    ek.ek(os.remove, old_file)
             # rename downloaded file
-            ek.ek(os.rename,zonefile_tmp,zonefile)
+            ek.ek(os.rename, zonefile_tmp, zonefile)
             # load the new zoneinfo
             reload(lib.dateutil.zoneinfo)
             sb_timezone = tz.tzlocal()
@@ -128,12 +131,12 @@ def _update_zoneinfo():
             return
     else:
         _remove_zoneinfo_failed(zonefile_tmp)
-        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(),logger.ERROR)
+        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
         return


 # update the network timezone table
 def update_network_dict():

     _remove_old_zoneinfo()
     _update_zoneinfo()

@@ -152,7 +155,7 @@ def update_network_dict():

     try:
         for line in url_data.splitlines():
-            (key, val) = line.decode('utf-8').strip().rsplit(u':',1)
+            (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
             if key is None or val is None:
                 continue
             d[key] = val
@@ -169,7 +172,8 @@ def update_network_dict():
         h_k = old_d.has_key(cur_d)
         if h_k and cur_t != old_d[cur_d]:
             # update old record
-            ql.append(["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
+            ql.append(
+                ["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
         elif not h_k:
             # add new record
             ql.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
@@ -178,12 +182,13 @@ def update_network_dict():
     # remove deleted records
     if len(old_d) > 0:
         L = list(va for va in old_d)
-        ql.append(["DELETE FROM network_timezones WHERE network_name IN ("+','.join(['?'] * len(L))+")", L])
+        ql.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])
     # change all network timezone infos at once (much faster)
     if len(ql) > 0:
         myDB.mass_action(ql)
         load_network_dict()

+
 # load network timezones from db into dict
 def load_network_dict():
     d = {}
@@ -199,6 +204,7 @@ def load_network_dict():
     global network_dict
     network_dict = d

+
 # get timezone of a network or return default timezone
 def get_network_timezone(network, network_dict):
     if network is None:
@@ -216,6 +222,7 @@ def get_network_timezone(network, network_dict):
     except:
         return sb_timezone

+
 # parse date and time string into local time
 def parse_date_time(d, t, network):
     if network_dict is None:
@@ -257,6 +264,7 @@ def parse_date_time(d, t, network):
     except (ValueError):
         return foreign_naive

+
 def test_timeformat(t):
     mo = time_regex.search(t)
     if mo is None or len(mo.groups()) < 2:
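`parse_date_time()` above combines an air date, an air-time string and the network's timezone from `network_dict`, then converts the result into the local `sb_timezone`. A stripped-down sketch of that conversion using the stock `dateutil` package (the project vendors it as `lib.dateutil`); the timezone mapping here is an example value, not real data:

    from datetime import datetime
    from dateutil import tz

    # example mapping; the real table is loaded from network_timezones in cache.db
    network_dict = {'NBC': 'US/Eastern'}
    sb_timezone = tz.tzlocal()

    def parse_date_time(d, t, network):
        # d: ordinal air date, t: "9:00 PM"-style string, network: channel name
        foreign_tz = tz.gettz(network_dict.get(network)) or sb_timezone
        naive = datetime.combine(datetime.fromordinal(d).date(),
                                 datetime.strptime(t, '%I:%M %p').time())
        return naive.replace(tzinfo=foreign_tz).astimezone(sb_timezone)

    print parse_date_time(datetime(2013, 6, 1).toordinal(), '9:00 PM', 'NBC')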
@@ -89,10 +89,12 @@ def notify_download(ep_name):
     for n in notifiers:
         n.notify_download(ep_name)

+
 def notify_subtitle_download(ep_name, lang):
     for n in notifiers:
         n.notify_subtitle_download(ep_name, lang)

+
 def notify_snatch(ep_name):
     for n in notifiers:
         n.notify_snatch(ep_name)
@@ -28,8 +28,8 @@ from sickbeard.exceptions import ex

 API_URL = "https://boxcar.io/devices/providers/fWc4sgSmpcN6JujtBmR6/notifications"

-class BoxcarNotifier:

+class BoxcarNotifier:
     def test_notify(self, email, title="Test"):
         return self._sendBoxcar("This is a test notification from SickBeard", title, email)

@@ -146,4 +146,5 @@ class BoxcarNotifier:
         self._sendBoxcar(message, title, username)
         return True

+
 notifier = BoxcarNotifier
@@ -31,6 +31,7 @@ from sickbeard import logger
 from sickbeard import db
 from sickbeard.exceptions import ex

+
 class EmailNotifier:
     def __init__(self):
         self.last_err = None
@@ -57,14 +58,20 @@ class EmailNotifier:
         else:
             try:
                 msg = MIMEMultipart('alternative')
-                msg.attach(MIMEText("<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Snatched</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>", 'html'))
+                msg.attach(MIMEText(
+                    "<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Snatched</h3>\n<p>Show: <b>" + re.search(
+                        "(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(
+                        ".+ - (.+?-.+) -.+", ep_name).group(
+                        1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>",
+                    'html'))
             except:
                 msg = MIMEText(ep_name)

             msg['Subject'] = 'Snatched: ' + ep_name
             msg['From'] = sickbeard.EMAIL_FROM
             msg['To'] = ','.join(to)
-            if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
+            if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS,
+                              sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
                 logger.log("Snatch notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG)
             else:
                 logger.log("Snatch notification ERROR: %s" % self.last_err, logger.ERROR)
@@ -84,14 +91,20 @@ class EmailNotifier:
         else:
             try:
                 msg = MIMEMultipart('alternative')
-                msg.attach(MIMEText("<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Downloaded</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>", 'html'))
+                msg.attach(MIMEText(
+                    "<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Downloaded</h3>\n<p>Show: <b>" + re.search(
+                        "(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(
+                        ".+ - (.+?-.+) -.+", ep_name).group(
+                        1) + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>",
+                    'html'))
             except:
                 msg = MIMEText(ep_name)

             msg['Subject'] = 'Downloaded: ' + ep_name
             msg['From'] = sickbeard.EMAIL_FROM
             msg['To'] = ','.join(to)
-            if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
+            if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS,
+                              sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
                 logger.log("Download notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG)
             else:
                 logger.log("Download notification ERROR: %s" % self.last_err, logger.ERROR)
@@ -111,14 +124,20 @@ class EmailNotifier:
         else:
             try:
                 msg = MIMEMultipart('alternative')
-                msg.attach(MIMEText("<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Subtitle Downloaded</h3>\n<p>Show: <b>" + re.search("(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(".+ - (.+?-.+) -.+", ep_name).group(1) + "</b></p>\n<p>Language: <b>" + lang + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>", 'html'))
+                msg.attach(MIMEText(
+                    "<body style='font-family:Helvetica, Arial, sans-serif;'><h3>Sick Beard Notification - Subtitle Downloaded</h3>\n<p>Show: <b>" + re.search(
+                        "(.+?) -.+", ep_name).group(1) + "</b></p>\n<p>Episode: <b>" + re.search(
+                        ".+ - (.+?-.+) -.+", ep_name).group(
+                        1) + "</b></p>\n<p>Language: <b>" + lang + "</b></p>\n\n<footer style='margin-top: 2.5em; padding: .7em 0; color: #777; border-top: #BBB solid 1px;'>Powered by Sick Beard.</footer></body>",
+                    'html'))
             except:
                 msg = MIMEText(ep_name + ": " + lang)

             msg['Subject'] = lang + ' Subtitle Downloaded: ' + ep_name
             msg['From'] = sickbeard.EMAIL_FROM
             msg['To'] = ','.join(to)
-            if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS, sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
+            if self._sendmail(sickbeard.EMAIL_HOST, sickbeard.EMAIL_PORT, sickbeard.EMAIL_FROM, sickbeard.EMAIL_TLS,
+                              sickbeard.EMAIL_USER, sickbeard.EMAIL_PASSWORD, to, msg):
                 logger.log("Download notification sent to [%s] for '%s'" % (to, ep_name), logger.DEBUG)
             else:
                 logger.log("Download notification ERROR: %s" % self.last_err, logger.ERROR)
@@ -128,7 +147,7 @@ class EmailNotifier:

         # Grab the global recipients
         for addr in sickbeard.EMAIL_LIST.split(','):
-            if(len(addr.strip()) > 0):
+            if (len(addr.strip()) > 0):
                 addrs.append(addr)

         # Grab the recipients for the show
@@ -137,7 +156,7 @@ class EmailNotifier:
         for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (s,)):
             if subs['notify_list']:
                 for addr in subs['notify_list'].split(','):
-                    if(len(addr.strip()) > 0):
+                    if (len(addr.strip()) > 0):
                         addrs.append(addr)

         addrs = set(addrs)
@@ -145,7 +164,8 @@ class EmailNotifier:
         return addrs

     def _sendmail(self, host, port, smtp_from, use_tls, user, pwd, to, msg, smtpDebug=False):
-        logger.log('HOST: %s; PORT: %s; FROM: %s, TLS: %s, USER: %s, PWD: %s, TO: %s' % (host, port, smtp_from, use_tls, user, pwd, to), logger.DEBUG)
+        logger.log('HOST: %s; PORT: %s; FROM: %s, TLS: %s, USER: %s, PWD: %s, TO: %s' % (
+            host, port, smtp_from, use_tls, user, pwd, to), logger.DEBUG)
         srv = smtplib.SMTP(host, int(port))
         if smtpDebug:
             srv.set_debuglevel(1)
@@ -173,4 +193,5 @@ class EmailNotifier:
         logger.log("TITLES: %s" % titles, logger.DEBUG)
         return titles

+
 notifier = EmailNotifier
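The reflowed `msg.attach(MIMEText(...))` calls above all build the same small HTML body by regex-slicing the episode name, falling back to a plain-text message when the slicing fails. The construction in isolation (addresses are placeholders and the inline styling is trimmed):

    import re
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText

    ep_name = 'Show Name - 2x03 - Episode Title - HDTV'

    msg = MIMEMultipart('alternative')
    show = re.search("(.+?) -.+", ep_name).group(1)
    episode = re.search(".+ - (.+?-.+) -.+", ep_name).group(1)
    msg.attach(MIMEText(
        "<body><h3>Sick Beard Notification - Snatched</h3>"
        "<p>Show: <b>" + show + "</b></p>"
        "<p>Episode: <b>" + episode + "</b></p></body>", 'html'))
    msg['Subject'] = 'Snatched: ' + ep_name
    msg['From'] = 'sickbeard@example.com'  # placeholder
    msg['To'] = 'user@example.com'         # placeholder
    print msg['Subject']  # Snatched: Show Name - 2x03 - Episode Title - HDTV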
@@ -25,11 +25,12 @@ from sickbeard.exceptions import ex

 from lib.growl import gntp

-class GrowlNotifier:

+class GrowlNotifier:
     def test_notify(self, host, password):
         self._sendRegistration(host, password, 'Test')
-        return self._sendGrowl("Test Growl", "Testing Growl settings from Sick Beard", "Test", host, password, force=True)
+        return self._sendGrowl("Test Growl", "Testing Growl settings from Sick Beard", "Test", host, password,
+                               force=True)

     def notify_snatch(self, ep_name):
         if sickbeard.GROWL_NOTIFY_ONSNATCH:
@@ -43,48 +44,50 @@ class GrowlNotifier:
         if sickbeard.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD:
             self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ": " + lang)

-    def _send_growl(self, options,message=None):
+    def _send_growl(self, options, message=None):

         #Send Notification
         notice = gntp.GNTPNotice()

         #Required
-        notice.add_header('Application-Name',options['app'])
-        notice.add_header('Notification-Name',options['name'])
-        notice.add_header('Notification-Title',options['title'])
+        notice.add_header('Application-Name', options['app'])
+        notice.add_header('Notification-Name', options['name'])
+        notice.add_header('Notification-Title', options['title'])

         if options['password']:
             notice.set_password(options['password'])

         #Optional
         if options['sticky']:
-            notice.add_header('Notification-Sticky',options['sticky'])
+            notice.add_header('Notification-Sticky', options['sticky'])
         if options['priority']:
-            notice.add_header('Notification-Priority',options['priority'])
+            notice.add_header('Notification-Priority', options['priority'])
         if options['icon']:
-            notice.add_header('Notification-Icon', 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')
+            notice.add_header('Notification-Icon',
+                              'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')

         if message:
-            notice.add_header('Notification-Text',message)
+            notice.add_header('Notification-Text', message)

-        response = self._send(options['host'],options['port'],notice.encode(),options['debug'])
-        if isinstance(response,gntp.GNTPOK): return True
+        response = self._send(options['host'], options['port'], notice.encode(), options['debug'])
+        if isinstance(response, gntp.GNTPOK): return True
         return False

-    def _send(self, host,port,data,debug=False):
-        if debug: print '<Sending>\n',data,'\n</Sending>'
+    def _send(self, host, port, data, debug=False):
+        if debug: print '<Sending>\n', data, '\n</Sending>'

         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.connect((host,port))
+        s.connect((host, port))
         s.send(data)
         response = gntp.parse_gntp(s.recv(1024))
         s.close()

-        if debug: print '<Recieved>\n',response,'\n</Recieved>'
+        if debug: print '<Recieved>\n', response, '\n</Recieved>'

         return response

-    def _sendGrowl(self, title="Sick Beard Notification", message=None, name=None, host=None, password=None, force=False):
+    def _sendGrowl(self, title="Sick Beard Notification", message=None, name=None, host=None, password=None,
+                   force=False):
         if not sickbeard.USE_GROWL and not force:
             return False

@ -101,7 +104,7 @@ class GrowlNotifier:
else:
port = int(hostParts[1])

growlHosts = [(hostParts[0],port)]
growlHosts = [(hostParts[0], port)]

opts = {}

@ -121,11 +124,10 @@ class GrowlNotifier:
opts['icon'] = True

for pc in growlHosts:
opts['host'] = pc[0]
opts['port'] = pc[1]
logger.log(u"Sending growl to "+opts['host']+":"+str(opts['port'])+": "+message)
logger.log(u"Sending growl to " + opts['host'] + ":" + str(opts['port']) + ": " + message)
try:
if self._send_growl(opts, message):
return True

@ -135,7 +137,7 @@ class GrowlNotifier:
else:
return False
except socket.error, e:
logger.log(u"Unable to send growl to "+opts['host']+":"+str(opts['port'])+": "+ex(e))
logger.log(u"Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + ": " + ex(e))
return False

def _sendRegistration(self, host=None, password=None, name='Sick Beard Notification'):

@ -154,20 +156,19 @@ class GrowlNotifier:
opts['host'] = hostParts[0]
opts['port'] = port

if password == None:
opts['password'] = sickbeard.GROWL_PASSWORD
else:
opts['password'] = password

opts['app'] = 'SickBeard'
opts['debug'] = False

#Send Registration
register = gntp.GNTPRegister()
register.add_header('Application-Name', opts['app'])
register.add_header('Application-Icon', 'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')
register.add_header('Application-Icon',
    'https://raw.github.com/midgetspy/Sick-Beard/master/data/images/sickbeard.png')

register.add_notification('Test', True)
register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True)

@ -177,11 +178,11 @@ class GrowlNotifier:
register.set_password(opts['password'])

try:
return self._send(opts['host'],opts['port'],register.encode(),opts['debug'])
return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])
except socket.error, e:
logger.log(u"Unable to send growl to "+opts['host']+":"+str(opts['port'])+": "+str(e).decode('utf-8'))
logger.log(
    u"Unable to send growl to " + opts['host'] + ":" + str(opts['port']) + ": " + str(e).decode('utf-8'))
return False


notifier = GrowlNotifier

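As a companion to the `_send_growl`/`_send` pair above, a minimal sketch of one GNTP notification over a raw socket, using the bundled `gntp` module exactly as the class does (Python 2). The host, port 23053 (the usual GNTP listener) and app name are placeholder assumptions, and it presumes a registration was already sent, as `_sendRegistration` does:

import socket
from lib.growl import gntp

def growl_once(host='localhost', port=23053, message='Hello from Sick Beard'):
    # build the notification frame; these four headers are the required set
    notice = gntp.GNTPNotice()
    notice.add_header('Application-Name', 'SickBeard')
    notice.add_header('Notification-Name', 'Test')
    notice.add_header('Notification-Title', 'Sick Beard Notification')
    notice.add_header('Notification-Text', message)

    # GNTP is a plain line-based TCP protocol
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))
    s.send(notice.encode())
    response = gntp.parse_gntp(s.recv(1024))
    s.close()
    return isinstance(response, gntp.GNTPOK)
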
@ -22,6 +22,7 @@ import sickbeard

from sickbeard import logger, common


def diagnose():
'''
Check the environment for reasons libnotify isn't working. Return a

@ -115,4 +116,5 @@ class LibnotifyNotifier:
except self.gobject.GError:
return False


notifier = LibnotifyNotifier

@ -3,22 +3,26 @@ import sickbeard
from sickbeard import logger, common
from lib.pynma import pynma


class NMA_Notifier:
def test_notify(self, nma_api, nma_priority):
return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard", force=True)
return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from Sick Beard",
    force=True)

def notify_snatch(self, ep_name):
if sickbeard.NMA_NOTIFY_ONSNATCH:
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
    message=ep_name)

def notify_download(self, ep_name):
if sickbeard.NMA_NOTIFY_ONDOWNLOAD:
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
    message=ep_name)

def notify_subtitle_download(self, ep_name, lang):
if sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD:
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
    message=ep_name + ": " + lang)

def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False):

@ -53,4 +57,5 @@ class NMA_Notifier:
else:
return True


notifier = NMA_Notifier

@ -78,7 +78,8 @@ class NMJNotifier:
logger.log(u"Found mounting url on the Popcorn Hour in configuration: %s" % (mount), logger.DEBUG)
sickbeard.NMJ_MOUNT = mount
else:
logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url", logger.DEBUG)
logger.log(u"Detected a network share on the Popcorn Hour, but could not get the mounting url",
    logger.DEBUG)
return False

return True

@ -180,4 +181,5 @@ class NMJNotifier:

return self._sendNMJ(host, database, mount)


notifier = NMJNotifier

@ -17,7 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

import urllib, urllib2,xml.dom.minidom
import urllib, urllib2, xml.dom.minidom
from xml.dom.minidom import parseString
import sickbeard
import telnetlib

@ -33,7 +33,6 @@ except ImportError:

class NMJv2Notifier:

def notify_snatch(self, ep_name):
return False
#Not implemented: Start the scanner when snatched does not make any sense

@ -58,30 +57,33 @@ class NMJv2Notifier:
Returns: True if the settings were retrieved successfully, False otherwise
"""
try:
url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2="+instance+"&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false"
url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2=" + instance + "&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false"
req = urllib2.Request(url_loc)
handle1 = urllib2.urlopen(req)
response1 = handle1.read()
xml = parseString(response1)
time.sleep (300.0 / 1000.0)
time.sleep(300.0 / 1000.0)
for node in xml.getElementsByTagName('path'):
xmlTag=node.toxml();
xmlTag = node.toxml();
xmlData=xmlTag.replace('<path>','').replace('</path>','').replace('[=]','')
xmlData = xmlTag.replace('<path>', '').replace('</path>', '').replace('[=]', '')
url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1="+ xmlData
url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1=" + xmlData
reqdb = urllib2.Request(url_db)
handledb = urllib2.urlopen(reqdb)
responsedb = handledb.read()
xmldb = parseString(responsedb)
returnvalue=xmldb.getElementsByTagName('returnValue')[0].toxml().replace('<returnValue>','').replace('</returnValue>','')
if returnvalue=="0":
DB_path=xmldb.getElementsByTagName('database_path')[0].toxml().replace('<database_path>','').replace('</database_path>','').replace('[=]','')
if dbloc=="local" and DB_path.find("localhost")>-1:
sickbeard.NMJv2_HOST=host
sickbeard.NMJv2_DATABASE=DB_path
returnvalue = xmldb.getElementsByTagName('returnValue')[0].toxml().replace('<returnValue>', '').replace(
    '</returnValue>', '')
if returnvalue == "0":
DB_path = xmldb.getElementsByTagName('database_path')[0].toxml().replace('<database_path>',
    '').replace(
    '</database_path>', '').replace('[=]', '')
if dbloc == "local" and DB_path.find("localhost") > -1:
sickbeard.NMJv2_HOST = host
sickbeard.NMJv2_DATABASE = DB_path
return True
if dbloc=="network" and DB_path.find("://")>-1:
if dbloc == "network" and DB_path.find("://") > -1:
sickbeard.NMJv2_HOST=host
sickbeard.NMJv2_HOST = host
sickbeard.NMJv2_DATABASE=DB_path
sickbeard.NMJv2_DATABASE = DB_path
return True

except IOError, e:

@ -102,15 +104,15 @@ class NMJv2Notifier:

#if a host is provided then attempt to open a handle to that URL
try:
url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=&arg3=update_all"
url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=&arg3=update_all"
logger.log(u"NMJ scan update command send to host: %s" % (host))
url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1="+ sickbeard.NMJv2_DATABASE +"&arg2=background&arg3="
url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=background&arg3="
logger.log(u"Try to mount network drive via url: %s" % (host), logger.DEBUG)
prereq = urllib2.Request(url_scandir)
req = urllib2.Request(url_updatedb)
handle1 = urllib2.urlopen(prereq)
response1 = handle1.read()
time.sleep (300.0 / 1000.0)
time.sleep(300.0 / 1000.0)
handle2 = urllib2.urlopen(req)
response2 = handle2.read()
except IOError, e:

@ -130,8 +132,8 @@ class NMJv2Notifier:
return False

# if the result was a number then consider that an error
error_codes=["8","11","22","49","50","51","60"]
error_codes = ["8", "11", "22", "49", "50", "51", "60"]
error_messages=["Invalid parameter(s)/argument(s)",
error_messages = ["Invalid parameter(s)/argument(s)",
"Invalid database path",
"Insufficient size",
"Database write error",

@ -139,12 +141,12 @@ class NMJv2Notifier:
"Open fifo pipe failed",
"Read only file system"]
if int(result1) > 0:
index=error_codes.index(result1)
index = error_codes.index(result1)
logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
return False
else:
if int(result2) > 0:
index=error_codes.index(result2)
index = error_codes.index(result2)
logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
return False
else:

@ -172,4 +174,5 @@ class NMJv2Notifier:

return self._sendNMJ(host)


notifier = NMJv2Notifier

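The settings probe above leans on `xml.dom.minidom` to pull `<path>` and `<returnValue>` elements out of the Popcorn Hour responses. A hedged, self-contained sketch of that parsing pattern; the sample XML is invented for illustration:

from xml.dom.minidom import parseString

sample = "<response><returnValue>0</returnValue><path>[=]/share/Video</path></response>"

doc = parseString(sample)

# toxml() returns '<tag>value</tag>'; the notifier strips the tags by hand
returnvalue = doc.getElementsByTagName('returnValue')[0].toxml() \
    .replace('<returnValue>', '').replace('</returnValue>', '')

for node in doc.getElementsByTagName('path'):
    path = node.toxml().replace('<path>', '').replace('</path>', '').replace('[=]', '')
    print(path)  # -> /share/Video

# node.firstChild.data would be the more conventional way to read the text node
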
@ -32,7 +32,6 @@ from xml.dom import minidom

class PLEXNotifier:

def _send_to_plex(self, command, host, username=None, password=None):
"""Handles communication to Plex hosts via HTTP API

@ -127,16 +126,17 @@ class PLEXNotifier:
for curHost in [x.strip() for x in host.split(",")]:
logger.log(u"Sending Plex notification to '" + curHost + "' - " + message, logger.MESSAGE)

command = {'command': 'ExecBuiltIn', 'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
command = {'command': 'ExecBuiltIn',
    'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
notifyResult = self._send_to_plex(command, curHost, username, password)
if notifyResult:
result += curHost + ':' + str(notifyResult)

return result

##############################################################################
# Public functions
##############################################################################

def notify_snatch(self, ep_name):
if sickbeard.PLEX_NOTIFY_ONSNATCH:

@ -151,7 +151,8 @@ class PLEXNotifier:
self._notify_pmc(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])

def test_notify(self, host, username, password):
return self._notify_pmc("Testing Plex notifications from Sick Beard", "Test Notification", host, username, password, force=True)
return self._notify_pmc("Testing Plex notifications from Sick Beard", "Test Notification", host, username,
    password, force=True)

def update_library(self):
"""Handles updating the Plex Media Server host via HTTP API

@ -168,7 +169,8 @@ class PLEXNotifier:
logger.log(u"No Plex Server host specified, check your settings", logger.DEBUG)
return False

logger.log(u"Updating library for the Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST, logger.MESSAGE)
logger.log(u"Updating library for the Plex Media Server host: " + sickbeard.PLEX_SERVER_HOST,
    logger.MESSAGE)

url = "http://%s/library/sections" % sickbeard.PLEX_SERVER_HOST
try:

@ -193,4 +195,5 @@ class PLEXNotifier:

return True


notifier = PLEXNotifier

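`update_library` above fetches `/library/sections` from the Plex Media Server and triggers a rescan per show section. A rough sketch of that request/parse loop (Python 2); the host value is a placeholder and the per-section `refresh` endpoint is an assumption based on how Plex's section API is commonly driven:

import urllib2
from xml.dom import minidom

host = '192.168.1.2:32400'  # placeholder Plex Media Server host:port

# list the library sections and refresh each 'show' section
xml_sections = minidom.parse(urllib2.urlopen('http://%s/library/sections' % host))
for section in xml_sections.getElementsByTagName('Directory'):
    if section.getAttribute('type') == 'show':
        key = section.getAttribute('key')
        # GETting the refresh endpoint asks the server to rescan that section
        urllib2.urlopen('http://%s/library/sections/%s/refresh' % (host, key))
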
@ -31,22 +31,26 @@ import sickbeard

from sickbeard import logger, common


class ProwlNotifier:
def test_notify(self, prowl_api, prowl_priority):
return self._sendProwl(prowl_api, prowl_priority, event="Test", message="Testing Prowl settings from Sick Beard", force=True)
return self._sendProwl(prowl_api, prowl_priority, event="Test",
    message="Testing Prowl settings from Sick Beard", force=True)

def notify_snatch(self, ep_name):
if sickbeard.PROWL_NOTIFY_ONSNATCH:
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
    message=ep_name)

def notify_download(self, ep_name):
if sickbeard.PROWL_NOTIFY_ONDOWNLOAD:
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
    message=ep_name)

def notify_subtitle_download(self, ep_name, lang):
if sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD:
self._sendProwl(prowl_api=None, prowl_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
self._sendProwl(prowl_api=None, prowl_priority=None,
    event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)

def _sendProwl(self, prowl_api=None, prowl_priority=None, event=None, message=None, force=False):

@ -59,7 +63,6 @@ class ProwlNotifier:
if prowl_priority == None:
prowl_priority = sickbeard.PROWL_PRIORITY

title = "Sick Beard"

logger.log(u"Prowl title: " + title, logger.DEBUG)

@ -74,13 +77,13 @@ class ProwlNotifier:
'application': title,
'event': event,
'description': message.encode('utf-8'),
'priority': prowl_priority }
'priority': prowl_priority}

try:
http_handler.request("POST",
"/publicapi/add",
headers = {'Content-type': "application/x-www-form-urlencoded"},
headers={'Content-type': "application/x-www-form-urlencoded"},
body = urlencode(data))
body=urlencode(data))
except (SSLError, HTTPException):
logger.log(u"Prowl notification failed.", logger.ERROR)
return False

@ -97,4 +100,5 @@ class ProwlNotifier:
logger.log(u"Prowl notification failed.", logger.ERROR)
return False


notifier = ProwlNotifier

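The Prowl and Pushalot senders share one pattern: an `HTTPSConnection`, a form-encoded body, and a check of the reply status. A bare-bones sketch of that POST (Python 2); the API key is a placeholder, and `api.prowlapp.com` plus the `apikey` field are assumptions about the public Prowl API rather than quotes from the hunks above:

from httplib import HTTPSConnection
from urllib import urlencode

def send_prowl(api_key, event, message):
    data = {'apikey': api_key,
            'application': 'Sick Beard',
            'event': event,
            'description': message.encode('utf-8'),
            'priority': 0}

    conn = HTTPSConnection("api.prowlapp.com")
    conn.request("POST", "/publicapi/add",
                 headers={'Content-type': "application/x-www-form-urlencoded"},
                 body=urlencode(data))
    response = conn.getresponse()
    # 200 means accepted; anything else carries an error document
    return response.status == 200
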
@ -24,22 +24,27 @@ from ssl import SSLError
import sickbeard
from sickbeard import logger, common


class PushalotNotifier:
def test_notify(self, pushalot_authorizationtoken):
return self._sendPushalot(pushalot_authorizationtoken, event="Test", message="Testing Pushalot settings from Sick Beard", force=True)
return self._sendPushalot(pushalot_authorizationtoken, event="Test",
    message="Testing Pushalot settings from Sick Beard", force=True)

def notify_snatch(self, ep_name):
if sickbeard.PUSHALOT_NOTIFY_ONSNATCH:
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name)
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_SNATCH],
    message=ep_name)

def notify_download(self, ep_name):
if sickbeard.PUSHALOT_NOTIFY_ONDOWNLOAD:
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name)
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
    message=ep_name)

def notify_subtitle_download(self, ep_name, lang):
if sickbeard.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD:
self._sendPushalot(pushalot_authorizationtoken=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang)
self._sendPushalot(pushalot_authorizationtoken=None,
    event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
    message=ep_name + ": " + lang)

def _sendPushalot(self, pushalot_authorizationtoken=None, event=None, message=None, force=False):

@ -57,13 +62,13 @@ class PushalotNotifier:

data = {'AuthorizationToken': pushalot_authorizationtoken,
'Title': event.encode('utf-8'),
'Body': message.encode('utf-8') }
'Body': message.encode('utf-8')}

try:
http_handler.request("POST",
"/api/sendmessage",
headers = {'Content-type': "application/x-www-form-urlencoded"},
headers={'Content-type': "application/x-www-form-urlencoded"},
body = urlencode(data))
body=urlencode(data))
except (SSLError, HTTPException):
logger.log(u"Pushalot notification failed.", logger.ERROR)
return False

@ -80,4 +85,5 @@ class PushalotNotifier:
logger.log(u"Pushalot notification failed.", logger.ERROR)
return False


notifier = PushalotNotifier

@ -25,27 +25,32 @@ from ssl import SSLError
import sickbeard
from sickbeard import logger, common


class PushbulletNotifier:
def test_notify(self, pushbullet_api):
return self._sendPushbullet(pushbullet_api, event="Test", message="Testing Pushbullet settings from Sick Beard", method="POST", notificationType="note", force=True)
return self._sendPushbullet(pushbullet_api, event="Test", message="Testing Pushbullet settings from Sick Beard",
    method="POST", notificationType="note", force=True)

def get_devices(self, pushbullet_api):
return self._sendPushbullet(pushbullet_api, method="GET", force=True)

def notify_snatch(self, ep_name):
if sickbeard.PUSHBULLET_NOTIFY_ONSNATCH:
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name, notificationType="note", method="POST")
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name,
    notificationType="note", method="POST")

def notify_download(self, ep_name):
if sickbeard.PUSHBULLET_NOTIFY_ONDOWNLOAD:
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name, notificationType="note", method="POST")
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
    message=ep_name, notificationType="note", method="POST")

def notify_subtitle_download(self, ep_name, lang):
if sickbeard.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD:
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang, notificationType="note", method="POST")
self._sendPushbullet(pushbullet_api=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
    message=ep_name + ": " + lang, notificationType="note", method="POST")

def _sendPushbullet(self, pushbullet_api=None, pushbullet_device=None, event=None, message=None, notificationType=None, method=None, force=False):
def _sendPushbullet(self, pushbullet_api=None, pushbullet_device=None, event=None, message=None,
    notificationType=None, method=None, force=False):

if not sickbeard.USE_PUSHBULLET and not force:
return False

@ -74,7 +79,7 @@ class PushbulletNotifier:
testMessage = True
try:
logger.log(u"Testing Pushbullet authentication and retrieving the device list.", logger.DEBUG)
http_handler.request(method, uri, None, headers={'Authorization':'Basic %s:' % authString})
http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString})
except (SSLError, HTTPException):
logger.log(u"Pushbullet notification failed.", logger.ERROR)
return False

@ -86,7 +91,8 @@ class PushbulletNotifier:
'body': message.encode('utf-8'),
'device_iden': pushbullet_device,
'type': notificationType}
http_handler.request(method, uri, body = urlencode(data), headers={'Authorization':'Basic %s' % authString})
http_handler.request(method, uri, body=urlencode(data),
    headers={'Authorization': 'Basic %s' % authString})
pass
except (SSLError, HTTPException):
return False

@ -108,5 +114,6 @@ class PushbulletNotifier:
logger.log(u"Pushbullet notification failed.", logger.ERROR)
return False


notifier = PushbulletNotifier

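The `Authorization: Basic` header above carries the base64 of `api_key:`, i.e. the token in the username slot with an empty password. A small sketch of building that header and listing devices (Python 2); `api.pushbullet.com` and the v1 `/api/devices` path match the era of this code but are assumptions here, since the `uri` value itself is outside the hunks shown:

from base64 import b64encode
from httplib import HTTPSConnection

def list_devices(api_key):
    # token as username, empty password, then base64 for HTTP basic auth
    auth = b64encode('%s:' % api_key)

    conn = HTTPSConnection("api.pushbullet.com")
    conn.request("GET", "/api/devices",
                 headers={'Authorization': 'Basic %s' % auth})
    response = conn.getresponse()
    return response.read()  # JSON document listing the account's devices
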
@ -30,12 +30,12 @@ from sickbeard.exceptions import ex
API_URL = "https://api.pushover.net/1/messages.json"
API_KEY = "OKCXmkvHN1syU2e8xvpefTnyvVWGv5"


class PushoverNotifier:

def test_notify(self, userKey=None):
return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey )
return self._sendPushover("This is a test notification from SickBeard", 'Test', userKey)

def _sendPushover(self, msg, title, userKey=None ):
def _sendPushover(self, msg, title, userKey=None):
"""
Sends a pushover notification to the address provided

@ -85,7 +85,7 @@ class PushoverNotifier:
elif e.code == 401:

#HTTP status 401 if the user doesn't have the service added
subscribeNote = self._sendPushover(msg, title, userKey )
subscribeNote = self._sendPushover(msg, title, userKey)
if subscribeNote:
logger.log("Subscription send", logger.DEBUG)
return True

@ -114,7 +114,7 @@ class PushoverNotifier:
if sickbeard.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD:
self._notifyPushover(title, ep_name + ": " + lang)

def _notifyPushover(self, title, message, userKey=None ):
def _notifyPushover(self, title, message, userKey=None):
"""
Sends a pushover notification based on the provided info or SB config

@ -137,4 +137,5 @@ class PushoverNotifier:
self._sendPushover(message, title)
return True


notifier = PushoverNotifier

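A minimal sketch of what `_sendPushover` boils down to: a form-encoded POST of `token`, `user`, `title` and `message` to the `API_URL` defined above (Python 2). The field names follow the public Pushover API; the user key is a placeholder:

import urllib
import urllib2

API_URL = "https://api.pushover.net/1/messages.json"

def send_pushover(app_token, user_key, title, message):
    data = urllib.urlencode({
        'token': app_token,  # the application's API token (API_KEY above)
        'user': user_key,    # the receiving user's key
        'title': title.encode('utf-8'),
        'message': message.encode('utf-8'),
    })
    try:
        # supplying a body turns the request into a POST
        handle = urllib2.urlopen(urllib2.Request(API_URL), data)
        return handle.read()
    except urllib2.HTTPError, e:
        # 401 is what the notifier uses to detect "service not added yet"
        return None
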
@ -25,8 +25,8 @@ from urllib2 import Request, urlopen, URLError
from sickbeard import logger
from sickbeard import encodingKludge as ek


class pyTivoNotifier:
def notify_snatch(self, ep_name):
pass

@ -69,7 +69,7 @@ class pyTivoNotifier:

# Some show names have colons in them which are illegal in a path location, so strip them out.
# (Are there other characters?)
showName = showName.replace(":","")
showName = showName.replace(":", "")

root = showPath.replace(showName, "")
showAndSeason = rootShowAndSeason.replace(root, "")

@ -78,11 +78,12 @@ class pyTivoNotifier:
file = "/" + absPath.replace(root, "")

# Finally create the url and make request
requestUrl = "http://" + host + "/TiVoConnect?" + urlencode( {'Command':'Push', 'Container':container, 'File':file, 'tsn':tsn} )
requestUrl = "http://" + host + "/TiVoConnect?" + urlencode(
    {'Command': 'Push', 'Container': container, 'File': file, 'tsn': tsn})

logger.log(u"pyTivo notification: Requesting " + requestUrl)

request = Request( requestUrl )
request = Request(requestUrl)

try:
response = urlopen(request) #@UnusedVariable

@ -99,4 +100,5 @@ class pyTivoNotifier:
logger.log(u"pyTivo notification: Successfully requested transfer of file")
return True


notifier = pyTivoNotifier

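The TiVoConnect call above is just a GET with query parameters built by `urllib.urlencode`. A tiny sketch of that URL construction (Python 2); the host, container, file path and tsn values are placeholders:

from urllib import urlencode
from urllib2 import Request, urlopen

host = '192.168.1.10:9032'  # pyTivo server host:port
params = {'Command': 'Push',
          'Container': 'TV',  # share name configured in pyTivo
          'File': '/Show Name/Season 1/episode.mp4',
          'tsn': '6480000190ABCDE'}

# urlencode percent-escapes spaces and slashes so the path survives the query string
request_url = "http://" + host + "/TiVoConnect?" + urlencode(params)
response = urlopen(Request(request_url))
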
@ -27,8 +27,8 @@ from sickbeard import logger
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex


class synoIndexNotifier:
def notify_snatch(self, ep_name):
pass

@ -46,15 +46,17 @@ class synoIndexNotifier:

def moveObject(self, old_path, new_path):
if sickbeard.USE_SYNOINDEX:
synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path), ek.ek(os.path.abspath, old_path)]
synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', ek.ek(os.path.abspath, new_path),
    ek.ek(os.path.abspath, old_path)]
logger.log(u"Executing command "+str(synoindex_cmd))
logger.log(u"Executing command " + str(synoindex_cmd))
logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
try:
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
    cwd=sickbeard.PROG_DIR)
out, err = p.communicate() #@UnusedVariable
logger.log(u"Script result: "+str(out), logger.DEBUG)
logger.log(u"Script result: " + str(out), logger.DEBUG)
except OSError, e:
logger.log(u"Unable to run synoindex: "+ex(e))
logger.log(u"Unable to run synoindex: " + ex(e))

def deleteFolder(self, cur_path):
self.makeObject('-D', cur_path)

@ -71,13 +73,15 @@ class synoIndexNotifier:
def makeObject(self, cmd_arg, cur_path):
if sickbeard.USE_SYNOINDEX:
synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, ek.ek(os.path.abspath, cur_path)]
logger.log(u"Executing command "+str(synoindex_cmd))
logger.log(u"Executing command " + str(synoindex_cmd))
logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
try:
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
    cwd=sickbeard.PROG_DIR)
out, err = p.communicate() #@UnusedVariable
logger.log(u"Script result: "+str(out), logger.DEBUG)
logger.log(u"Script result: " + str(out), logger.DEBUG)
except OSError, e:
logger.log(u"Unable to run synoindex: "+ex(e))
logger.log(u"Unable to run synoindex: " + ex(e))


notifier = synoIndexNotifier

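Both Synology notifiers shell out the same way: build an argv list, run it with `subprocess.Popen`, and log the combined output. A minimal sketch of the pattern; the `synoindex` binary only exists on Synology DSM, so `echo` stands in here:

import subprocess

def run_and_log(cmd):
    # argv-style list avoids shell quoting issues entirely
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)  # merge stderr into stdout
    out, _ = p.communicate()
    print("Script result: %s" % out)
    return p.returncode

# on a real DiskStation this would be ['/usr/syno/bin/synoindex', '-A', path]
run_and_log(['echo', 'indexed'])
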
@ -27,8 +27,8 @@ from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard import common


class synologyNotifier:
def notify_snatch(self, ep_name):
if sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH:
self._send_synologyNotifier(ep_name, common.notifyStrings[common.NOTIFY_SNATCH])

@ -43,13 +43,15 @@ class synologyNotifier:

def _send_synologyNotifier(self, message, title):
synodsmnotify_cmd = ["/usr/syno/bin/synodsmnotify", "@administrators", title, message]
logger.log(u"Executing command "+str(synodsmnotify_cmd))
logger.log(u"Executing command " + str(synodsmnotify_cmd))
logger.log(u"Absolute path to command: "+ek.ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG)
logger.log(u"Absolute path to command: " + ek.ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG)
try:
p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
p = subprocess.Popen(synodsmnotify_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
    cwd=sickbeard.PROG_DIR)
out, err = p.communicate() #@UnusedVariable
logger.log(u"Script result: "+str(out), logger.DEBUG)
logger.log(u"Script result: " + str(out), logger.DEBUG)
except OSError, e:
logger.log(u"Unable to run synodsmnotify: "+ex(e))
logger.log(u"Unable to run synodsmnotify: " + ex(e))


notifier = synologyNotifier

@ -20,6 +20,7 @@ import sickbeard
from sickbeard import logger
from lib.trakt import *


class TraktNotifier:
"""
A "notifier" for trakt.tv which keeps track of what has and hasn't been added to your library.

@ -48,10 +49,10 @@ class TraktNotifier:
'indexer_id': ep_obj.show.indexerid,
'title': ep_obj.show.name,
'year': ep_obj.show.startyear,
'episodes': [ {
'episodes': [{
'season': ep_obj.season,
'episode': ep_obj.episode
} ]
}]
}

if data is not None:

@ -30,8 +30,8 @@ except:
import lib.oauth2 as oauth
import lib.pythontwitter as twitter


class TwitterNotifier:
consumer_key = "vHHtcB6WzpWDG6KYlBMr8g"
consumer_secret = "zMqq5CB3f8cWKiRO2KzWPTlBanYmV0VYxSXZ0Pxds0E"

@ -42,15 +42,15 @@ class TwitterNotifier:

def notify_snatch(self, ep_name):
if sickbeard.TWITTER_NOTIFY_ONSNATCH:
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH]+': '+ep_name)
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SNATCH] + ': ' + ep_name)

def notify_download(self, ep_name):
if sickbeard.TWITTER_NOTIFY_ONDOWNLOAD:
self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD]+': '+ep_name)
self._notifyTwitter(common.notifyStrings[common.NOTIFY_DOWNLOAD] + ': ' + ep_name)

def notify_subtitle_download(self, ep_name, lang):
if sickbeard.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD:
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD]+' '+ep_name + ": " + lang)
self._notifyTwitter(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD] + ' ' + ep_name + ": " + lang)

def test_notify(self):
return self._notifyTwitter("This is a test notification from Sick Beard", force=True)

@ -73,7 +73,7 @@ class TwitterNotifier:
sickbeard.TWITTER_USERNAME = request_token['oauth_token']
sickbeard.TWITTER_PASSWORD = request_token['oauth_token_secret']

return self.AUTHORIZATION_URL+"?oauth_token="+ request_token['oauth_token']
return self.AUTHORIZATION_URL + "?oauth_token=" + request_token['oauth_token']

def _get_credentials(self, key):
request_token = {}

@ -85,22 +85,22 @@ class TwitterNotifier:
token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
token.set_verifier(key)

logger.log('Generating and signing request for an access token using key '+key)
logger.log('Generating and signing request for an access token using key ' + key)

signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() #@UnusedVariable
oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret)
logger.log('oauth_consumer: '+str(oauth_consumer))
logger.log('oauth_consumer: ' + str(oauth_consumer))
oauth_client = oauth.Client(oauth_consumer, token)
logger.log('oauth_client: '+str(oauth_client))
logger.log('oauth_client: ' + str(oauth_client))
resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key)
logger.log('resp, content: '+str(resp)+','+str(content))
logger.log('resp, content: ' + str(resp) + ',' + str(content))

access_token = dict(parse_qsl(content))
logger.log('access_token: '+str(access_token))
logger.log('access_token: ' + str(access_token))

logger.log('resp[status] = '+str(resp['status']))
logger.log('resp[status] = ' + str(resp['status']))
if resp['status'] != '200':
logger.log('The request for a token with did not succeed: '+str(resp['status']), logger.ERROR)
logger.log('The request for a token with did not succeed: ' + str(resp['status']), logger.ERROR)
return False
else:
logger.log('Your Twitter Access Token key: %s' % access_token['oauth_token'])

@ -112,19 +112,19 @@ class TwitterNotifier:

def _send_tweet(self, message=None):

username=self.consumer_key
username = self.consumer_key
password=self.consumer_secret
password = self.consumer_secret
access_token_key=sickbeard.TWITTER_USERNAME
access_token_key = sickbeard.TWITTER_USERNAME
access_token_secret=sickbeard.TWITTER_PASSWORD
access_token_secret = sickbeard.TWITTER_PASSWORD

logger.log(u"Sending tweet: "+message)
logger.log(u"Sending tweet: " + message)

api = twitter.Api(username, password, access_token_key, access_token_secret)

try:
api.PostUpdate(message.encode('utf8'))
except Exception, e:
logger.log(u"Error Sending Tweet: "+ex(e), logger.ERROR)
logger.log(u"Error Sending Tweet: " + ex(e), logger.ERROR)
return False

return True

@ -135,6 +135,7 @@ class TwitterNotifier:
if not sickbeard.USE_TWITTER and not force:
return False

return self._send_tweet(prefix+": "+message)
return self._send_tweet(prefix + ": " + message)


notifier = TwitterNotifier

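`_get_credentials` above performs the final leg of the OAuth 1.0a dance with the bundled `oauth2` library: wrap the request token plus verifier PIN in a Client and POST to the access-token endpoint. A condensed sketch (Python 2); the keys and PIN are placeholders, and the endpoint URL is the later api.twitter.com form, whereas the class defines its own `ACCESS_TOKEN_URL`:

from urlparse import parse_qsl
import lib.oauth2 as oauth

ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'

def exchange_verifier_for_access_token(consumer_key, consumer_secret,
                                       req_token, req_secret, pin):
    token = oauth.Token(req_token, req_secret)
    token.set_verifier(pin)  # the PIN the user copied from twitter.com

    consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
    client = oauth.Client(consumer, token)
    resp, content = client.request(ACCESS_TOKEN_URL, method='POST',
                                   body='oauth_verifier=%s' % pin)
    if resp['status'] != '200':
        return None
    # body is form-encoded: oauth_token=...&oauth_token_secret=...
    return dict(parse_qsl(content))
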
@ -41,7 +41,6 @@ except ImportError:


class XBMCNotifier:

sb_logo_url = 'http://www.sickbeard.com/xbmc-notify.png'

def _get_xbmc_version(self, host, username, password):

@ -133,18 +132,22 @@ class XBMCNotifier:
if xbmcapi:
if (xbmcapi <= 4):
logger.log(u"Detected XBMC version <= 11, using XBMC HTTP API", logger.DEBUG)
command = {'command': 'ExecBuiltIn', 'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'}
command = {'command': 'ExecBuiltIn',
    'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode(
    "utf-8") + ')'}
notifyResult = self._send_to_xbmc(command, curHost, username, password)
if notifyResult:
result += curHost + ':' + str(notifyResult)
else:
logger.log(u"Detected XBMC version >= 12, using XBMC JSON API", logger.DEBUG)
command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url)
command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (
    title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url)
notifyResult = self._send_to_xbmc_json(command, curHost, username, password)
if notifyResult:
result += curHost + ':' + notifyResult["result"].decode(sickbeard.SYS_ENCODING)
else:
logger.log(u"Failed to detect XBMC version for '" + curHost + "', check configuration and try again.", logger.ERROR)
logger.log(u"Failed to detect XBMC version for '" + curHost + "', check configuration and try again.",
    logger.ERROR)
result += curHost + ':False'

return result

@ -182,14 +185,15 @@ class XBMCNotifier:
|
||||||
else:
|
else:
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.", logger.DEBUG)
|
logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.",
|
||||||
|
logger.DEBUG)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
##############################################################################
|
##############################################################################
|
||||||
# Legacy HTTP API (pre XBMC 12) methods
|
# Legacy HTTP API (pre XBMC 12) methods
|
||||||
##############################################################################
|
##############################################################################
|
||||||
|
|
||||||
def _send_to_xbmc(self, command, host=None, username=None, password=None):
|
def _send_to_xbmc(self, command, host=None, username=None, password=None):
|
||||||
"""Handles communication to XBMC servers via HTTP API
|
"""Handles communication to XBMC servers via HTTP API
|
||||||
|
@ -242,7 +246,8 @@ class XBMCNotifier:
|
||||||
return result
|
return result
|
||||||
|
|
||||||
except (urllib2.URLError, IOError), e:
|
except (urllib2.URLError, IOError), e:
|
||||||
logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e), logger.WARNING)
|
logger.log(u"Warning: Couldn't contact XBMC HTTP at " + fixStupidEncodings(url) + " " + ex(e),
|
||||||
|
logger.WARNING)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _update_library(self, host=None, showName=None):
|
def _update_library(self, host=None, showName=None):
|
||||||
|
@ -275,7 +280,8 @@ class XBMCNotifier:
|
||||||
'and tvshowlinkpath.idPath = path.idPath' % (showName)
|
'and tvshowlinkpath.idPath = path.idPath' % (showName)
|
||||||
|
|
||||||
# use this to get xml back for the path lookups
|
# use this to get xml back for the path lookups
|
||||||
xmlCommand = {'command': 'SetResponseFormat(webheader;false;webfooter;false;header;<xml>;footer;</xml>;opentag;<tag>;closetag;</tag>;closefinaltag;false)'}
|
xmlCommand = {
|
||||||
|
'command': 'SetResponseFormat(webheader;false;webfooter;false;header;<xml>;footer;</xml>;opentag;<tag>;closetag;</tag>;closefinaltag;false)'}
|
||||||
# sql used to grab path(s)
|
# sql used to grab path(s)
|
||||||
sqlCommand = {'command': 'QueryVideoDatabase(%s)' % (pathSql)}
|
sqlCommand = {'command': 'QueryVideoDatabase(%s)' % (pathSql)}
|
||||||
# set output back to default
|
# set output back to default
|
||||||
|
@ -313,7 +319,8 @@ class XBMCNotifier:
|
||||||
updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video, %s)' % (unEncPath)}
|
updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'XBMC.updatelibrary(video, %s)' % (unEncPath)}
|
||||||
request = self._send_to_xbmc(updateCommand, host)
|
request = self._send_to_xbmc(updateCommand, host)
|
||||||
if not request:
|
if not request:
|
||||||
logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath, logger.ERROR)
|
logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath,
|
||||||
|
logger.ERROR)
|
||||||
return False
|
return False
|
||||||
# sleep for a few seconds just to be sure xbmc has a chance to finish each directory
|
# sleep for a few seconds just to be sure xbmc has a chance to finish each directory
|
||||||
if len(paths) > 1:
|
if len(paths) > 1:
|
||||||
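
Note: each legacy HTTP API command dict above is URL-encoded into a GET request against XBMC's command endpoint. A minimal sketch of that exchange (Python 2); the host value and the /xbmcCmds/xbmcHttp path are assumptions from the pre-XBMC-12 HTTP API, not values taken from this diff:

    import urllib
    import urllib2

    host = '192.168.1.10:8080'  # placeholder XBMC host
    sqlCommand = {'command': 'QueryVideoDatabase(select c00 from tvshow)'}
    # assumed legacy endpoint; the command dict is urlencoded onto the query string
    url = 'http://' + host + '/xbmcCmds/xbmcHttp/?' + urllib.urlencode(sqlCommand)
    print urllib2.urlopen(url).read()
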
@@ -330,9 +337,9 @@ class XBMCNotifier:

        return True

    ##############################################################################
    # JSON-RPC API (XBMC 12+) methods
    ##############################################################################

    def _send_to_xbmc_json(self, command, host=None, username=None, password=None):
        """Handles communication to XBMC servers via JSONRPC

@@ -377,7 +384,8 @@ class XBMCNotifier:
        try:
            response = urllib2.urlopen(req)
        except urllib2.URLError, e:
-            logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e), logger.WARNING)
+            logger.log(u"Error while trying to retrieve XBMC API version for " + host + ": " + ex(e),
+                       logger.WARNING)
            return False

        # parse the json result

@@ -391,7 +399,8 @@ class XBMCNotifier:
            return False

        except IOError, e:
-            logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e), logger.WARNING)
+            logger.log(u"Warning: Couldn't contact XBMC JSON API at " + fixStupidEncodings(url) + " " + ex(e),
+                       logger.WARNING)
            return False

    def _update_library_json(self, host=None, showName=None):

@@ -441,27 +450,34 @@ class XBMCNotifier:
                return False

            # lookup tv-show path
-            pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % (tvshowid)
+            pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % (
+                tvshowid)
            pathResponse = self._send_to_xbmc_json(pathCommand, host)

            path = pathResponse["result"]["tvshowdetails"]["file"]
-            logger.log(u"Received Show: " + show["label"] + " with ID: " + str(tvshowid) + " Path: " + path, logger.DEBUG)
+            logger.log(u"Received Show: " + show["label"] + " with ID: " + str(tvshowid) + " Path: " + path,
+                       logger.DEBUG)

            if (len(path) < 1):
-                logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host, logger.WARNING)
+                logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host,
+                           logger.WARNING)
                return False

            logger.log(u"XBMC Updating " + showName + " on " + host + " at " + path, logger.DEBUG)
-            updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (json.dumps(path))
+            updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (
+                json.dumps(path))
            request = self._send_to_xbmc_json(updateCommand, host)
            if not request:
-                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path, logger.ERROR)
+                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path,
+                           logger.ERROR)
                return False

            # catch if there was an error in the returned request
            for r in request:
                if 'error' in r:
-                    logger.log(u"Error while attempting to update show directory for " + showName + " on " + host + " at " + path, logger.ERROR)
+                    logger.log(
+                        u"Error while attempting to update show directory for " + showName + " on " + host + " at " + path,
+                        logger.ERROR)
                    return False

        # do a full update if requested

@@ -476,9 +492,9 @@ class XBMCNotifier:

        return True

    ##############################################################################
    # Public functions which will call the JSON or Legacy HTTP API methods
    ##############################################################################

    def notify_snatch(self, ep_name):
        if sickbeard.XBMC_NOTIFY_ONSNATCH:

@@ -493,7 +509,8 @@ class XBMCNotifier:
            self._notify_xbmc(ep_name + ": " + lang, common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD])

    def test_notify(self, host, username, password):
-        return self._notify_xbmc("Testing XBMC notifications from Sick Beard", "Test Notification", host, username, password, force=True)
+        return self._notify_xbmc("Testing XBMC notifications from Sick Beard", "Test Notification", host, username,
+                                 password, force=True)

    def update_library(self, showName=None):
        """Public wrapper for the update library functions to branch the logic for JSON-RPC or legacy HTTP API

@@ -521,10 +538,12 @@ class XBMCNotifier:
        for host in [x.strip() for x in sickbeard.XBMC_HOST.split(",")]:
            if self._send_update_library(host, showName):
                if sickbeard.XBMC_UPDATE_ONLYFIRST:
-                    logger.log(u"Successfully updated '" + host + "', stopped sending update library commands.", logger.DEBUG)
+                    logger.log(u"Successfully updated '" + host + "', stopped sending update library commands.",
+                               logger.DEBUG)
                    return True
            else:
-                logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.", logger.ERROR)
+                logger.log(u"Failed to detect XBMC version for '" + host + "', check configuration and try again.",
+                           logger.ERROR)
                result = result + 1

        # needed for the 'update xbmc' submenu command

@@ -534,4 +553,5 @@ class XBMCNotifier:
        else:
            return False

+
notifier = XBMCNotifier
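
Note: all of the JSON-RPC hunks above flow through _send_to_xbmc_json, which POSTs a JSON payload to XBMC's /jsonrpc endpoint with optional basic auth. A standalone sketch of that exchange (Python 2); the host, credentials, and endpoint path are illustrative assumptions, not values from this diff:

    import base64
    import json
    import urllib2

    command = '{"jsonrpc":"2.0","method":"JSONRPC.Version","id":1}'
    req = urllib2.Request('http://192.168.1.10:8080/jsonrpc', command)  # placeholder host
    req.add_header('Content-type', 'application/json')
    # basic auth header, mirroring what the notifier sends when credentials are configured
    auth = base64.encodestring('%s:%s' % ('user', 'pass')).strip()
    req.add_header('Authorization', 'Basic %s' % auth)
    print json.load(urllib2.urlopen(req))
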
@@ -32,7 +32,6 @@ from sickbeard.exceptions import ex


def getSeasonNZBs(name, urlData, season):
-
    try:
        showXML = etree.ElementTree(etree.XML(urlData))
    except SyntaxError:

@@ -78,7 +77,6 @@ def getSeasonNZBs(name, urlData, season):


def createNZBString(fileElements, xmlns):
-
    rootElement = etree.Element("nzb")
    if xmlns:
        rootElement.set("xmlns", xmlns)

@@ -90,7 +88,6 @@ def createNZBString(fileElements, xmlns):


def saveNZB(nzbName, nzbString):
-
    try:
        with ek.ek(open, nzbName + ".nzb", 'w') as nzb_fh:
            nzb_fh.write(nzbString)

@@ -108,7 +105,6 @@ def stripNS(element, ns):


def splitResult(result):
-
    urlData = helpers.getURL(result.url)

    if urlData is None:

@@ -143,17 +139,23 @@ def splitResult(result):
            return False

        # make sure the result is sane
-        if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
-            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it", logger.WARNING)
+        if (parse_result.season_number != None and parse_result.season_number != season) or (
+                parse_result.season_number == None and season != 1):
+            logger.log(
+                u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it",
+                logger.WARNING)
            continue
        elif len(parse_result.episode_numbers) == 0:
-            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it", logger.WARNING)
+            logger.log(
+                u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it",
+                logger.WARNING)
            continue

        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
-                logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " + Quality.qualityStrings[result.quality], logger.DEBUG)
+                logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " +
+                           Quality.qualityStrings[result.quality], logger.DEBUG)
                wantEp = False
                break
        if not wantEp:
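
Note: the reflowed sanity check above reads more easily as a standalone predicate: an NZB with no parsed season is only trusted when splitting a season-1 pack. A sketch (the function name is illustrative):

    def belongs_to_season(parsed_season, pack_season):
        # no season parsed: only acceptable for season-1 packs
        if parsed_season is None:
            return pack_season == 1
        return parsed_season == pack_season

    assert belongs_to_season(None, 1)
    assert not belongs_to_season(None, 2)
    assert not belongs_to_season(3, 2)
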
@@ -33,8 +33,8 @@ from sickbeard import logger, helpers

from common import Quality

-def sendNZB(nzb, proper = False):
+
+def sendNZB(nzb, proper=False):
    addToTop = False
    nzbgetprio = 0
    nzbgetXMLrpc = "http://%(username)s:%(password)s@%(host)s/xmlrpc"

@@ -43,17 +43,21 @@ def sendNZB(nzb, proper = False):
        logger.log(u"No NZBget host found in configuration. Please configure it.", logger.ERROR)
        return False

-    url = nzbgetXMLrpc % {"host": sickbeard.NZBGET_HOST, "username": sickbeard.NZBGET_USERNAME, "password": sickbeard.NZBGET_PASSWORD}
+    url = nzbgetXMLrpc % {"host": sickbeard.NZBGET_HOST, "username": sickbeard.NZBGET_USERNAME,
+                          "password": sickbeard.NZBGET_PASSWORD}

    nzbGetRPC = xmlrpclib.ServerProxy(url)
    try:
        if nzbGetRPC.writelog("INFO", "Sickbeard connected to drop of %s any moment now." % (nzb.name + ".nzb")):
            logger.log(u"Successful connected to NZBget", logger.DEBUG)
        else:
-            logger.log(u"Successful connected to NZBget, but unable to send a message" % (nzb.name + ".nzb"), logger.ERROR)
+            logger.log(u"Successful connected to NZBget, but unable to send a message" % (nzb.name + ".nzb"),
+                       logger.ERROR)

    except httplib.socket.error:
-        logger.log(u"Please check your NZBget host and port (if it is running). NZBget is not responding to this combination", logger.ERROR)
+        logger.log(
+            u"Please check your NZBget host and port (if it is running). NZBget is not responding to this combination",
+            logger.ERROR)
        return False

    except xmlrpclib.ProtocolError, e:

@@ -102,9 +106,11 @@ def sendNZB(nzb, proper = False):
        if nzbget_version == 0:
            nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64)
        elif nzbget_version >= 12:
-            nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False, nzbcontent64, False, dupekey, dupescore, "score")
+            nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False,
+                                             nzbcontent64, False, dupekey, dupescore, "score")
        else:
-            nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False, nzbcontent64)
+            nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, nzbgetprio, False,
+                                             nzbcontent64)

        if nzbget_result:
            logger.log(u"NZB sent to NZBget successfully", logger.DEBUG)
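
Note: the hunks above talk to NZBGet over XML-RPC, where the writelog call doubles as a connectivity test before append queues the NZB. A sketch of the connection (Python 2); the host and credentials are placeholders, not values from this diff:

    import xmlrpclib

    url = 'http://%(username)s:%(password)s@%(host)s/xmlrpc' % {
        'host': 'localhost:6789', 'username': 'nzbget', 'password': 'secret'}
    nzbGetRPC = xmlrpclib.ServerProxy(url)
    # writelog is the same probe the diff uses to verify the connection
    if nzbGetRPC.writelog('INFO', 'connectivity test'):
        print 'connected to NZBget'
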
@@ -46,8 +46,6 @@ from sickbeard.exceptions import ex

from sickbeard.name_parser.parser import NameParser, InvalidNameException

-from sickbeard.indexers import indexer_api, indexer_exceptions
-from common import indexerStrings

class PostProcessor(object):
    """

@@ -59,7 +57,7 @@ class PostProcessor(object):
    EXISTS_SMALLER = 3
    DOESNT_EXIST = 4

-    IGNORED_FILESTRINGS = [ "/.AppleDouble/", ".DS_Store" ]
+    IGNORED_FILESTRINGS = ["/.AppleDouble/", ".DS_Store"]

    NZB_NAME = 1
    FOLDER_NAME = 2

@@ -149,7 +147,8 @@ class PostProcessor(object):
            return PostProcessor.EXISTS_SMALLER

        else:
-            self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it", logger.DEBUG)
+            self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it",
+                      logger.DEBUG)
            return PostProcessor.DOESNT_EXIST

    def list_associated_files(self, file_path, base_name_only=False, subtitles_only=False):

@@ -185,7 +184,7 @@ class PostProcessor(object):
            if associated_file_path == file_path:
                continue
            # only list it if the only non-shared part is the extension or if it is a subtitle
-            if subtitles_only and not associated_file_path[len(associated_file_path)-3:] in common.subtitleExtensions:
+            if subtitles_only and not associated_file_path[len(associated_file_path) - 3:] in common.subtitleExtensions:
                continue

            #Exclude .rar files from associated list

@@ -227,7 +226,7 @@ class PostProcessor(object):
                # File is read-only, so make it writeable
                self._log('Read only mode on file ' + cur_file + ' Will try to make it writeable', logger.DEBUG)
                try:
-                    ek.ek(os.chmod,cur_file,stat.S_IWRITE)
+                    ek.ek(os.chmod, cur_file, stat.S_IWRITE)
                except:
                    self._log(u'Cannot change permissions of ' + cur_file, logger.WARNING)

@@ -235,7 +234,8 @@ class PostProcessor(object):
            # do the library update for synoindex
            notifiers.synoindex_notifier.deleteFile(cur_file)

-    def _combined_file_operation (self, file_path, new_path, new_base_name, associated_files=False, action=None, subtitles=False):
+    def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None,
+                                 subtitles=False):
        """
        Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
        and optionally move associated files too.

@@ -321,7 +321,8 @@ class PostProcessor(object):
                self._log("Unable to move file " + cur_file_path + " to " + new_file_path + ": " + str(e), logger.ERROR)
                raise e

-        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move, subtitles=subtitles)
+        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move,
+                                      subtitles=subtitles)

    def _copy(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
        """

@@ -331,7 +332,7 @@ class PostProcessor(object):
        associated_files: Boolean, whether we should copy similarly-named files too
        """

-        def _int_copy (cur_file_path, new_file_path):
+        def _int_copy(cur_file_path, new_file_path):

            self._log(u"Copying file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
            try:

@@ -341,7 +342,8 @@ class PostProcessor(object):
                logger.log("Unable to copy file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                raise e

-        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy, subtitles=subtitles)
+        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy,
+                                      subtitles=subtitles)


    def _hardlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):

@@ -361,6 +363,7 @@ class PostProcessor(object):
            except (IOError, OSError), e:
                self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                raise e
+
        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_hard_link)

    def _moveAndSymlink(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):

@@ -380,7 +383,9 @@ class PostProcessor(object):
            except (IOError, OSError), e:
                self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                raise e
-        self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move_and_sym_link)
+
+        self._combined_file_operation(file_path, new_path, new_base_name, associated_files,
+                                      action=_int_move_and_sym_link)

    def _history_lookup(self):
        """

@@ -476,7 +481,8 @@ class PostProcessor(object):

        # remember whether it's a proper
        if parse_result.extra_info:
-            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) != None
+            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,
+                                       re.I) != None

        # if the result is complete then remember that for later
        if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:

@@ -488,9 +494,11 @@ class PostProcessor(object):
            elif test_name == self.file_name:
                self.good_results[self.FILE_NAME] = True
            else:
-                logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
+                logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(
+                    self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
        else:
-            logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name", logger.DEBUG)
+            logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name",
+                       logger.DEBUG)
            logger.log("Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
            logger.log("Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
            logger.log("Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)

@@ -507,7 +515,7 @@ class PostProcessor(object):

        # see if we can find the name directly in the DB, if so use it
        for cur_name in name_list:
-            self._log(u"Looking up " + cur_name +u" in the DB", logger.DEBUG)
+            self._log(u"Looking up " + cur_name + u" in the DB", logger.DEBUG)
            db_result = helpers.searchDBForShow(cur_name)
            if db_result:
                self._log(u"Lookup successful, using " + db_result[0] + " id " + str(db_result[1]), logger.DEBUG)

@@ -521,11 +529,12 @@ class PostProcessor(object):

                lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

-                self._log(u"Looking up name " + cur_name + u" on " + self.indexer + "", logger.DEBUG)
+                self._log(u"Looking up name " + cur_name + u" on " + sickbeard.indexerApi(self.indexer).name + "",
+                          logger.DEBUG)
                showObj = t[cur_name]
-            except (indexer_exceptions.indexer_exception, IOError):
+            except (sickbeard.indexer_exception, IOError):
                # if none found, search on all languages
                try:
                    lINDEXER_API_PARMS = {'indexer': self.indexer}

@@ -533,18 +542,21 @@ class PostProcessor(object):
                    lINDEXER_API_PARMS['search_all_languages'] = True
                    lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI

-                    t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                    t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

-                    self._log(u"Looking up name " + cur_name + u" in all languages on " + self.indexer + "", logger.DEBUG)
+                    self._log(u"Looking up name " + cur_name + u" in all languages on " + sickbeard.indexerApi(
+                        self.indexer).name + "", logger.DEBUG)
                    showObj = t[cur_name]
-                except (indexer_exceptions.indexer_exception, IOError):
+                except (sickbeard.indexer_exception, IOError):
                    pass

                continue
            except (IOError):
                continue

-            self._log(u"Lookup successful, using " + self.indexer + " id " + str(showObj["id"]), logger.DEBUG)
+            self._log(
+                u"Lookup successful, using " + sickbeard.indexerApi(self.indexer).name + " id " + str(showObj["id"]),
+                logger.DEBUG)
            _finalize(parse_result)
            return (int(showObj["id"]), season, episodes)
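
Note: the substantive change in these hunks is the swap from the indexer_api/indexer_exceptions imports to a module-level sickbeard.indexerApi wrapper with a .name property and an .indexers list. A standalone sketch of that wrapper pattern (the class below is illustrative, not the actual sickbeard implementation):

    class IndexerApiSketch(object):
        """Illustrative stand-in for a multi-indexer wrapper."""
        CONFIGS = {1: {'name': 'theTVDB'}, 2: {'name': 'TVRage'}}

        def __init__(self, indexer=None, **api_parms):
            self.indexer = indexer
            self.api_parms = api_parms  # e.g. language, custom_ui

        @property
        def name(self):
            return self.CONFIGS[self.indexer]['name']

        @property
        def indexers(self):
            return sorted(self.CONFIGS)

    for indexer in IndexerApiSketch().indexers:
        print IndexerApiSketch(indexer).name  # theTVDB, TVRage
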
@@ -598,13 +610,14 @@ class PostProcessor(object):

        # for air-by-date shows we need to look up the season/episode from tvdb
        if season == -1 and indexer_id and episodes:
-            self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode", logger.DEBUG)
+            self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode",
+                      logger.DEBUG)

            # try to get language set for this show
            indexer_lang = None
            try:
                showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
-                if(showObj != None):
+                if (showObj != None):
                    # set the language of the show
                    indexer_lang = showObj.lang
                    self.indexer = showObj.indexer

@@ -617,7 +630,7 @@ class PostProcessor(object):
                if indexer_lang and not indexer_lang == 'en':
                    lINDEXER_API_PARMS = {'language': indexer_lang}

-                t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

                epObj = t[indexer_id].airedOn(episodes[0])[0]

@@ -625,22 +638,28 @@ class PostProcessor(object):
                episodes = [int(epObj["episodenumber"])]

                self._log(u"Got season " + str(season) + " episodes " + str(episodes), logger.DEBUG)
-            except indexer_exceptions.indexer_episodenotfound, e:
-                self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(indexer_id) + u", skipping", logger.DEBUG)
+            except sickbeard.indexer_episodenotfound, e:
+                self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
+                    indexer_id) + u", skipping", logger.DEBUG)
                # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                episodes = []
                continue
-            except indexer_exceptions.indexer_error, e:
-                logger.log(u"Unable to contact " + self.indexer + ": " + ex(e), logger.WARNING)
+            except sickbeard.indexer_error, e:
+                logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
+                           logger.WARNING)
                episodes = []
                continue

        # if there's no season then we can hopefully just use 1 automatically
        elif season == None and indexer_id:
            myDB = db.DBConnection()
-            numseasonsSQlResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [indexer_id])
+            numseasonsSQlResult = myDB.select(
+                "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0",
+                [indexer_id])
            if int(numseasonsSQlResult[0][0]) == 1 and season == None:
-                self._log(u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...", logger.DEBUG)
+                self._log(
+                    u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...",
+                    logger.DEBUG)
                season = 1

        if indexer_id and season != None and episodes:
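
Note: the single-season fallback above hinges on one SQL query. A runnable sketch against a throwaway in-memory table (schema reduced to the two columns the query touches):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE tv_episodes (showid INTEGER, season INTEGER)')
    conn.executemany('INSERT INTO tv_episodes VALUES (?, ?)',
                     [(1, 0), (1, 1), (1, 1)])  # season 0 specials are excluded
    row = conn.execute(
        'SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes '
        'WHERE showid = ? and season != 0', [1]).fetchone()
    print row[0]  # -> 1, so season can safely default to 1
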
@@ -671,7 +690,8 @@ class PostProcessor(object):

        # if we can't find the show then there's nothing we can really do
        if not show_obj:
-            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode", logger.ERROR)
+            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
+                      logger.ERROR)
            raise exceptions.PostProcessingFailed()

        root_ep = None

@@ -713,7 +733,9 @@ class PostProcessor(object):
        if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) #@UnusedVariable
            if ep_quality != common.Quality.UNKNOWN:
-                self._log(u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
+                self._log(
+                    u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
+                    logger.DEBUG)
                return ep_quality

        # nzb name is the most reliable if it exists, followed by folder name and lastly file name

@@ -727,18 +749,24 @@ class PostProcessor(object):
                continue

            ep_quality = common.Quality.nameQuality(cur_name)
-            self._log(u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
+            self._log(
+                u"Looking up quality for name " + cur_name + u", got " + common.Quality.qualityStrings[ep_quality],
+                logger.DEBUG)

            # if we find a good one then use it
            if ep_quality != common.Quality.UNKNOWN:
-                logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
+                logger.log(cur_name + u" looks like it has quality " + common.Quality.qualityStrings[
+                    ep_quality] + ", using that", logger.DEBUG)
                return ep_quality

        # if we didn't get a quality from one of the names above, try assuming from each of the names
        ep_quality = common.Quality.assumeQuality(self.file_name)
-        self._log(u"Guessing quality for name " + self.file_name+u", got " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
+        self._log(
+            u"Guessing quality for name " + self.file_name + u", got " + common.Quality.qualityStrings[ep_quality],
+            logger.DEBUG)
        if ep_quality != common.Quality.UNKNOWN:
-            logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[ep_quality] + ", using that", logger.DEBUG)
+            logger.log(self.file_name + u" looks like it has quality " + common.Quality.qualityStrings[
+                ep_quality] + ", using that", logger.DEBUG)
            return ep_quality

        test = str(ep_quality)

@@ -757,12 +785,14 @@ class PostProcessor(object):
            script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
            self._log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)

-            script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season), str(ep_obj.episode), str(ep_obj.airdate)]
+            script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season),
+                                       str(ep_obj.episode), str(ep_obj.airdate)]

            # use subprocess to run the command and capture output
            self._log(u"Executing command " + str(script_cmd))
            try:
-                p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
+                p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+                                     stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
                out, err = p.communicate() # @UnusedVariable
                self._log(u"Script result: " + str(out), logger.DEBUG)

@@ -795,12 +825,15 @@ class PostProcessor(object):

        # if the user downloaded it manually and it's higher quality than the existing episode then it's priority
        if new_ep_quality > old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
-            self._log(u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority", logger.DEBUG)
+            self._log(
+                u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority",
+                logger.DEBUG)
            return True

        # if the user downloaded it manually and it appears to be a PROPER/REPACK then it's priority
        if self.is_proper and new_ep_quality >= old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
-            self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority", logger.DEBUG)
+            self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority",
+                      logger.DEBUG)
            return True

        return False

@@ -825,7 +858,7 @@ class PostProcessor(object):
        # try to find the file info
        indexer_id = season = episodes = None
        if 'auto' in self.indexer:
-            for indexer in indexerStrings:
+            for indexer in sickbeard.indexerApi().indexers:
                self.indexer = indexer

                # try to find the file info

@@ -833,7 +866,8 @@ class PostProcessor(object):
                if indexer_id and season != None and episodes:
                    break

-                self._log(u"Can't find show on " + self.indexer + ", auto trying next indexer in list", logger.WARNING)
+                self._log(u"Can't find show on " + sickbeard.indexerApi(
+                    self.indexer).name + ", auto trying next indexer in list", logger.WARNING)
        else:
            (indexer_id, season, episodes) = self._find_info()

@@ -865,17 +899,22 @@ class PostProcessor(object):

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
-            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.ERROR)
+            self._log(
+                u"File exists and we are not going to replace it because it's not smaller, quitting post-processing",
+                logger.ERROR)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
-            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
+            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.",
+                      logger.ERROR)
            return False

        # if the file is priority then we're going to replace it even if it exists
        else:
-            self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)
+            self._log(
+                u"This download is marked a priority download so I'm going to replace an existing file if I find one",
+                logger.DEBUG)

        # delete the existing file (and company)
        for cur_ep in [ep_obj] + ep_obj.relatedEps:

@@ -883,7 +922,8 @@ class PostProcessor(object):
                    self._delete(cur_ep.location, associated_files=True)
                    # clean up any left over folders
                    if cur_ep.location:
-                        helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
+                        helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
+                                                     keep_dir=ep_obj.show._location)
            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to delete the existing files")

@@ -953,7 +993,8 @@ class PostProcessor(object):

            dest_path = ek.ek(os.path.dirname, proper_absolute_path)
        except exceptions.ShowDirNotFoundException:
-            raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")
+            raise exceptions.PostProcessingFailed(
+                u"Unable to post-process an episode if the show dir doesn't exist, quitting")

        self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

@@ -974,13 +1015,17 @@ class PostProcessor(object):
        try:
            # move the episode and associated files to the show dir
            if self.process_method == "copy":
-                self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
+                self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
+                           sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            elif self.process_method == "move":
-                self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
+                self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
+                           sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            elif self.process_method == "hardlink":
-                self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
+                self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
+                               sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            elif self.process_method == "symlink":
-                self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
+                self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
+                                     sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
            else:
                logger.log(u"Unknown process method: " + sickbeard.PROCESS_METHOD, logger.ERROR)
                raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
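
Note: the copy/move/hardlink/symlink chain at the end of this file's hunks is a four-way dispatch on process_method. The same shape, sketched as a dispatch table with stand-in handlers (nothing here is sickbeard code):

    def _copy(src, dst): print 'copy %s -> %s' % (src, dst)
    def _move(src, dst): print 'move %s -> %s' % (src, dst)

    PROCESS_METHODS = {'copy': _copy, 'move': _move}  # stand-ins only

    def process(method, src, dst):
        try:
            PROCESS_METHODS[method](src, dst)
        except KeyError:
            raise ValueError('Unknown process method: %s' % method)

    process('move', '/downloads/ep.mkv', '/tv/Show/Season 1/ep.mkv')
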
@@ -36,11 +36,14 @@ from sickbeard import failedProcessor
from lib.unrar2 import RarFile, RarInfo
from lib.unrar2.rar_exceptions import *

-def logHelper (logMessage, logLevel=logger.MESSAGE):
+
+def logHelper(logMessage, logLevel=logger.MESSAGE):
    logger.log(logMessage, logLevel)
    return logMessage + u"\n"

-def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto", indexer="auto"):
+
+def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="auto",
+               indexer="auto"):
    """
    Scans through the files in dirName and processes whatever media files it finds

@@ -72,7 +75,9 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior

    # if we didn't find a real dir then quit
    if not ek.ek(os.path.isdir, dirName):
-        returnStr += logHelper(u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG)
+        returnStr += logHelper(
+            u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.",
+            logger.DEBUG)
        return returnStr

    path, dirs, files = get_path_dir_files(dirName, nzbName, type)

@@ -128,26 +133,28 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
            #Don't Link media when the media is extracted from a rar in the same path
            if process_method in ('hardlink', 'symlink') and videoInRar:
                process_media(processPath, videoInRar, nzbName, 'move', force, is_priority, indexer)
-                process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, is_priority, indexer)
+                process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
+                              is_priority, indexer)
                delete_files(processPath, rarContent)
            else:
                process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer)

            #Delete all file not needed
            if process_method != "move" or not process_result \
-                    or type=="manual": #Avoid to delete files if is Manual PostProcessing
+                    or type == "manual": #Avoid to delete files if is Manual PostProcessing
                continue

            delete_files(processPath, notwantedFiles)

            if process_method == "move" and \
-                    ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
+                    ek.ek(os.path.normpath, processPath) != ek.ek(os.path.normpath,
+                                                                  sickbeard.TV_DOWNLOAD_DIR):
                delete_dir(processPath)

    return returnStr

-def validateDir(path, dirName, nzbNameOriginal, failed):
+
+def validateDir(path, dirName, nzbNameOriginal, failed):
    global process_result, returnStr

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)

@@ -156,10 +163,12 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
        returnStr += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'):
-        returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
+        returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized.",
+                               logger.DEBUG)
        failed = True
    elif ek.ek(os.path.basename, dirName).startswith('_UNPACK_'):
-        returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
+        returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked.",
+                               logger.DEBUG)

    if failed:
        process_failed(os.path.join(path, dirName), nzbNameOriginal)

@@ -169,8 +178,12 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")
    for sqlShow in sqlResults:
-        if dirName.lower().startswith(ek.ek(os.path.realpath, sqlShow["location"]).lower()+os.sep) or dirName.lower() == ek.ek(os.path.realpath, sqlShow["location"]).lower():
-            returnStr += logHelper(u"You're trying to post process an episode that's already been moved to its show dir, skipping", logger.ERROR)
+        if dirName.lower().startswith(
+                ek.ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or dirName.lower() == ek.ek(
+                os.path.realpath, sqlShow["location"]).lower():
+            returnStr += logHelper(
+                u"You're trying to post process an episode that's already been moved to its show dir, skipping",
+                logger.ERROR)
            return False

    # Get the videofile list for the next checks
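
Note: the reflowed condition above asks one question: does dirName sit inside (or equal) a show's real location? Restated as a standalone helper (the function name is illustrative):

    import os

    def inside_show_dir(dir_name, show_location):
        real = os.path.realpath(show_location).lower()
        return (dir_name.lower().startswith(real + os.sep)
                or dir_name.lower() == real)

    print inside_show_dir('/tv/show/season 1', '/tv/show')  # True
    print inside_show_dir('/downloads/show', '/tv/show')    # False
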
@@ -211,8 +224,8 @@ def validateDir(path, dirName, nzbNameOriginal, failed):

    return False

-def unRAR(path, rarFiles, force):
+
+def unRAR(path, rarFiles, force):
    global process_result, returnStr

    unpacked_files = []

@@ -232,14 +245,16 @@ def unRAR(path, rarFiles, force):
                skip_file = False
                for file_in_archive in [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]:
                    if already_postprocessed(path, file_in_archive, force):
-                        returnStr += logHelper(u"Archive file already post-processed, extraction skipped: " + file_in_archive, logger.DEBUG)
+                        returnStr += logHelper(
+                            u"Archive file already post-processed, extraction skipped: " + file_in_archive,
+                            logger.DEBUG)
                        skip_file = True
                        break

                if skip_file:
                    continue

-                rar_handle.extract(path = path, withSubpath = False, overwrite = False)
+                rar_handle.extract(path=path, withSubpath=False, overwrite=False)
                unpacked_files += [os.path.basename(x.filename) for x in rar_handle.infolist() if not x.isdir]
                del rar_handle
            except Exception, e:
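
Note: the extraction hunk above exercises the bundled lib.unrar2 API: infolist() to inspect archive members and extract() with keyword arguments to unpack flat. A sketch (assumes the bundled library and an existing archive; the paths are placeholders):

    from lib.unrar2 import RarFile

    rar_handle = RarFile('/downloads/show.s01e01.rar')  # placeholder path
    members = [x.filename for x in rar_handle.infolist() if not x.isdir]
    rar_handle.extract(path='/downloads', withSubpath=False, overwrite=False)
    del rar_handle  # release the archive handle
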
|
@@ -251,8 +266,8 @@ def unRAR(path, rarFiles, force):
     return unpacked_files


-def already_postprocessed(dirName, videofile, force):
+
+def already_postprocessed(dirName, videofile, force):
     global returnStr

     if force:

@@ -266,7 +281,8 @@ def already_postprocessed(dirName, videofile, force):
     myDB = db.DBConnection()
     sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [dirName])
     if sqlResult:
-        returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping", logger.DEBUG)
+        returnStr += logHelper(u"You're trying to post process a dir that's already been processed, skipping",
+                               logger.DEBUG)
         return True

     # This is needed for video whose name differ from dirName

@@ -275,7 +291,8 @@ def already_postprocessed(dirName, videofile, force):
     sqlResult = myDB.select("SELECT * FROM tv_episodes WHERE release_name = ?", [videofile.rpartition('.')[0]])
     if sqlResult:
-        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
+        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
+                               logger.DEBUG)
         return True

     #Needed if we have downloaded the same episode @ different quality

@@ -285,13 +302,14 @@ def already_postprocessed(dirName, videofile, force):
     search_sql += " and history.resource LIKE ?"
     sqlResult = myDB.select(search_sql, [u'%' + videofile])
     if sqlResult:
-        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping", logger.DEBUG)
+        returnStr += logHelper(u"You're trying to post process a video that's already been processed, skipping",
+                               logger.DEBUG)
         return True

     return False


-def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer):
+
+def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, indexer):
     global process_result, returnStr

     for cur_video_file in videoFiles:

@@ -314,14 +332,15 @@ def process_media(processPath, videoFiles, nzbName, process_method, force, is_pr
         if process_result:
             returnStr += logHelper(u"Processing succeeded for " + cur_video_file_path)
         else:
-            returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message, logger.WARNING)
+            returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message,
+                                   logger.WARNING)

         #If something fail abort the processing on dir
         if not process_result:
             break


-def delete_files(processPath, notwantedFiles):
+
+def delete_files(processPath, notwantedFiles):
     global returnStr, process_result

     if not process_result:

@@ -343,20 +362,22 @@ def delete_files(processPath, notwantedFiles):
             # File is read-only, so make it writeable
             returnStr += logHelper(u"Changing ReadOnly Flag for file " + cur_file, logger.DEBUG)
             try:
-                ek.ek(os.chmod,cur_file_path,stat.S_IWRITE)
+                ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
             except OSError, e:
-                returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + e.strerror, logger.DEBUG)
+                returnStr += logHelper(u"Cannot change permissions of " + cur_file_path + ': ' + e.strerror,
+                                       logger.DEBUG)
         try:
             ek.ek(os.remove, cur_file_path)
         except OSError, e:
             returnStr += logHelper(u"Unable to delete file " + cur_file + ': ' + e.strerror, logger.DEBUG)


-def delete_dir(processPath):
+
+def delete_dir(processPath):
     global returnStr

     if not ek.ek(os.listdir, processPath) == []:
-        returnStr += logHelper(u"Skipping Deleting folder " + processPath + ' because some files was not deleted/processed', logger.DEBUG)
+        returnStr += logHelper(
+            u"Skipping Deleting folder " + processPath + ' because some files was not deleted/processed', logger.DEBUG)
         return

     returnStr += logHelper(u"Deleting folder " + processPath, logger.DEBUG)

@@ -366,15 +387,16 @@ def delete_dir(processPath):
     except (OSError, IOError), e:
         returnStr += logHelper(u"Warning: unable to remove the folder " + processPath + ": " + ex(e), logger.WARNING)


-def get_path_dir_files(dirName, nzbName, type):
-    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type =="manual": #Scheduled Post Processing Active
+
+def get_path_dir_files(dirName, nzbName, type):
+    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type == "manual": #Scheduled Post Processing Active
         #Get at first all the subdir in the dirName
         for path, dirs, files in ek.ek(os.walk, dirName):
             break
     else:
         path, dirs = ek.ek(os.path.split, dirName) #Script Post Processing
-        if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(os.path.join(dirName, nzbName)): #For single torrent file without Dir
+        if not nzbName is None and not nzbName.endswith('.nzb') and os.path.isfile(
+                os.path.join(dirName, nzbName)): #For single torrent file without Dir
             dirs = []
             files = [os.path.join(dirName, nzbName)]
         else:

@@ -383,6 +405,7 @@ def get_path_dir_files(dirName, nzbName, type):
     return path, dirs, files


+
 def process_failed(dirName, nzbName):
     """Process a download that did not complete correctly"""

@@ -405,4 +428,6 @@ def process_failed(dirName, nzbName):
     if process_result:
         returnStr += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
     else:
-        returnStr += logHelper(u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message, logger.WARNING)
+        returnStr += logHelper(
+            u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
+            logger.WARNING)
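The three already-processed guards above all reduce to the same lookup: has a release with this name been recorded before? A minimal sketch of that check in isolation, with the standard-library sqlite3 module standing in for sickbeard's db.DBConnection wrapper and `db_path` a hypothetical argument:

    import sqlite3

    def already_processed(db_path, release_name):
        # True if a prior post-processing run recorded this release name;
        # table and column names follow the queries in the hunks above.
        conn = sqlite3.connect(db_path)
        try:
            row = conn.execute("SELECT 1 FROM tv_episodes WHERE release_name = ? LIMIT 1",
                               [release_name]).fetchone()
            return row is not None
        finally:
            conn.close()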
@@ -31,14 +31,10 @@ from sickbeard import history

 from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality

-from sickbeard.indexers import indexer_api, indexer_exceptions
-
 from name_parser.parser import NameParser, InvalidNameException


 class ProperFinder():
     def __init__(self):
         self.updateInterval = datetime.timedelta(hours=1)

@@ -56,7 +52,7 @@ class ProperFinder():
         dayDiff = (datetime.date.today() - self._get_lastProperSearch()).days

         # if it's less than an interval after the update time then do an update
-        if hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600 or dayDiff >=1:
+        if hourDiff >= 0 and hourDiff < self.updateInterval.seconds / 3600 or dayDiff >= 1:
             logger.log(u"Beginning the search for new propers")
         else:
             return

@@ -110,7 +106,9 @@ class ProperFinder():
                 continue

             if not parse_result.episode_numbers:
-                logger.log(u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
+                logger.log(
+                    u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
+                    logger.DEBUG)
                 continue

             # populate our Proper instance

@@ -138,7 +136,9 @@ class ProperFinder():

                     # if it matches
                     if genericName == self._genericName(curSceneName):
-                        logger.log(u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG)
+                        logger.log(
+                            u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name,
+                            logger.DEBUG)

                         # set the indexerid in the db to the show's indexerid
                         curProper.indexerid = curShow.indexerid

@@ -157,7 +157,8 @@ class ProperFinder():
                 continue

             if not show_name_helpers.filterBadReleases(curProper.name):
-                logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", logger.DEBUG)
+                logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it",
+                           logger.DEBUG)
                 continue

             # if we have an air-by-date show then get the real season/episode numbers

@@ -175,18 +176,21 @@ class ProperFinder():
                     lINDEXER_API_PARMS['language'] = indexer_lang

                 try:
-                    t = indexer_api.indexerApi(**lINDEXER_API_PARMS)
+                    t = sickbeard.indexerApi(**lINDEXER_API_PARMS)

                     epObj = t[curProper.indexerid].airedOn(curProper.episode)[0]

                     curProper.season = int(epObj["seasonnumber"])
                     curProper.episodes = [int(epObj["episodenumber"])]
-                except indexer_exceptions.indexer_episodenotfound:
-                    logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
+                except sickbeard.indexer_episodenotfound:
+                    logger.log(u"Unable to find episode with date " + str(
+                        curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                     continue

             # check if we actually want this proper (if it's the right quality)
-            sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.indexerid, curProper.season, curProper.episode])
+            sqlResults = db.DBConnection().select(
+                "SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
+                [curProper.indexerid, curProper.season, curProper.episode])
             if not sqlResults:
                 continue
             oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

@@ -196,7 +200,8 @@ class ProperFinder():
                 continue

             # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
-            if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
+            if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
+                    operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
                 logger.log(u"Found a proper that we need: " + str(curProper.name))
                 finalPropers.append(curProper)

@@ -214,11 +219,13 @@ class ProperFinder():
                 "SELECT resource FROM history "
                 "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "
                 "AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")",
-                [curProper.indexerid, curProper.season, curProper.episode, curProper.quality, historyLimit.strftime(history.dateFormat)])
+                [curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
+                 historyLimit.strftime(history.dateFormat)])

             # if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
             if len(historyResults) == 0:
-                logger.log(u"Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.")
+                logger.log(
+                    u"Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.")
                 continue

             else:

@@ -237,7 +244,8 @@ class ProperFinder():
             # get the episode object
             showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
             if showObj == None:
-                logger.log(u"Unable to find the show with indexerid " + str(curProper.indexerid) + " so unable to download the proper", logger.ERROR)
+                logger.log(u"Unable to find the show with indexerid " + str(
+                    curProper.indexerid) + " so unable to download the proper", logger.ERROR)
                 continue
             epObj = showObj.getEpisode(curProper.season, curProper.episode)

@@ -263,7 +271,8 @@ class ProperFinder():
         sqlResults = myDB.select("SELECT * FROM info")

         if len(sqlResults) == 0:
-            myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)", [0, 0, str(when)])
+            myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)",
+                        [0, 0, str(when)])
         else:
             myDB.action("UPDATE info SET last_proper_search=" + str(when))
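The wrapped membership test in the @@ -196 hunk is easy to misread once it spans two lines; as a sketch under the same attribute names, the duplicate-proper guard is simply:

    import operator

    def is_new_proper(curProper, finalPropers):
        # One proper per (indexerid, season, episode) triple, mirroring the
        # map(operator.attrgetter(...)) test above; curProper/finalPropers
        # are the objects from the surrounding code, not redefined here.
        key = operator.attrgetter('indexerid', 'season', 'episode')
        return curProper.indexerid != -1 and key(curProper) not in map(key, finalPropers)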
@@ -31,15 +31,15 @@ __all__ = ['ezrss',
            'iptorrents',
            'omgwtfnzbs',
            'nextgen'
            ]

 import sickbeard
 from sickbeard import logger

 from os import sys

-def sortedProviderList():
+
+def sortedProviderList():
     initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList
     providerDict = dict(zip([x.getID() for x in initialList], initialList))

@@ -57,12 +57,12 @@ def sortedProviderList():

     return newList

-def makeProviderList():
+
+def makeProviderList():
     return [x.provider for x in [getProviderModule(y) for y in __all__] if x]

-def getNewznabProviderList(data):
+
+def getNewznabProviderList(data):
     defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')]
     providerList = filter(lambda x: x, [makeNewznabProvider(x) for x in data.split('!!!')])

@@ -85,7 +85,6 @@ def getNewznabProviderList(data):

-
 def makeNewznabProvider(configString):
     if not configString:
         return None

@@ -102,12 +101,13 @@ def makeNewznabProvider(configString):

     return newProvider


 def getTorrentRssProviderList(data):
     providerList = filter(lambda x: x, [makeTorrentRssProvider(x) for x in data.split('!!!')])
     return filter(lambda x: x, providerList)

-def makeTorrentRssProvider(configString):
+
+def makeTorrentRssProvider(configString):
     if not configString:
         return None

@@ -120,20 +120,24 @@ def makeTorrentRssProvider(configString):

     return newProvider


 def getDefaultNewznabProviders():
     return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040,5060|0!!!NZBs.org|http://nzbs.org/||5030,5040,5060,5070,5090|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0'


 def getProviderModule(name):
     name = name.lower()
     prefix = "sickbeard.providers."
-    if name in __all__ and prefix+name in sys.modules:
-        return sys.modules[prefix+name]
+    if name in __all__ and prefix + name in sys.modules:
+        return sys.modules[prefix + name]
     else:
-        raise Exception("Can't find " + prefix+name + " in " + "Providers")
+        raise Exception("Can't find " + prefix + name + " in " + "Providers")


 def getProviderClass(id):
-    providerMatch = [x for x in sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if x.getID() == id]
+    providerMatch = [x for x in
+                     sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList if
+                     x.getID() == id]

     if len(providerMatch) != 1:
         return None
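getDefaultNewznabProviders shows the on-disk format these functions consume: provider records joined with '!!!', fields joined with '|'. A sketch of a parser for that string; the field layout (name|url|key|categories|enabled) is an inference from the default value above, not taken from the project:

    def parse_newznab_config(config_string):
        # '!!!' separates providers, '|' separates fields, as in the
        # default string returned by getDefaultNewznabProviders above.
        providers = []
        for record in config_string.split('!!!'):
            if not record:
                continue
            name, url, key, categories, enabled = record.split('|')
            providers.append({'name': name,
                              'url': url,
                              'key': key,
                              'categories': categories.split(',') if categories else [],
                              'enabled': enabled == '1'})
        return providers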
@@ -36,7 +36,6 @@ import math


 class BTNProvider(generic.TorrentProvider):
-
     def __init__(self):
         generic.TorrentProvider.__init__(self, "BTN")

@@ -64,8 +63,10 @@ class BTNProvider(generic.TorrentProvider):
             return self._checkAuth()

         if 'api-error' in parsedJSON:
-            logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['api-error'], logger.DEBUG)
-            raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.")
+            logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['api-error'],
+                       logger.DEBUG)
+            raise AuthException(
+                "Your authentication credentials for " + self.name + " are incorrect, check your config.")

         return True

@@ -150,7 +151,7 @@ class BTNProvider(generic.TorrentProvider):

         except Exception, error:
             errorstring = str(error)
-            if(errorstring.startswith('<') and errorstring.endswith('>')):
+            if (errorstring.startswith('<') and errorstring.endswith('>')):
                 errorstring = errorstring[1:-1]
             logger.log(u"Unknown error while accessing " + self.name + ": " + errorstring, logger.ERROR)

@@ -296,7 +297,6 @@ class BTNProvider(generic.TorrentProvider):


 class BTNCache(tvcache.TVCache):
-
     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)

@@ -334,7 +334,8 @@ class BTNCache(tvcache.TVCache):
                 myDB.mass_action(cl)

             else:
-                raise AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")
+                raise AuthException(
+                    "Your authentication info for " + self.provider.name + " is incorrect, check your config")

         else:
             return []

@@ -350,7 +351,9 @@ class BTNCache(tvcache.TVCache):

         # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search, older things will need to be done through backlog
         if seconds_since_last_update > 86400:
-            logger.log(u"The last known successful update on " + self.provider.name + " was more than 24 hours ago, only trying to fetch the last 24 hours!", logger.WARNING)
+            logger.log(
+                u"The last known successful update on " + self.provider.name + " was more than 24 hours ago, only trying to fetch the last 24 hours!",
+                logger.WARNING)
             seconds_since_last_update = 86400

         data = self.provider._doSearch(search_params=None, age=seconds_since_last_update)

@@ -364,10 +367,12 @@ class BTNCache(tvcache.TVCache):
             logger.log(u"Adding item to results: " + title, logger.DEBUG)
             return self._addCacheEntry(title, url)
         else:
-            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", logger.ERROR)
+            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
+                       logger.ERROR)
             return None

     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)


 provider = BTNProvider()
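The cache-update hunk above clamps the "RSS" search window to 24 hours before handing an age to _doSearch. A sketch of that calculation in isolation; the function and argument names are mine, not the provider's:

    import time

    def rss_search_window(last_update_ts, now=None):
        # Ask the provider only for items newer than the last successful
        # update, but never more than 24 hours (86400 s) worth; older gaps
        # are left to the backlog search, as the hunk above logs.
        now = time.time() if now is None else now
        return max(0, min(int(now - last_update_ts), 86400))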
@@ -31,8 +31,8 @@ from sickbeard.helpers import sanitizeSceneName, get_xml_text
 from sickbeard import show_name_helpers
 from sickbeard.exceptions import ex

-class DTTProvider(generic.TorrentProvider):
-
+
+class DTTProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, "DailyTvTorrents")
         self.supportsBacklog = True

@@ -55,13 +55,13 @@ class DTTProvider(generic.TorrentProvider):
         return generic.TorrentProvider.findSeasonResults(self, show, season)

     def _dtt_show_id(self, show_name):
-        return sanitizeSceneName(show_name).replace('.','-').lower()
+        return sanitizeSceneName(show_name).replace('.', '-').lower()

     def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
         search_string = []

         for show_name in set(show_name_helpers.allPossibleShowNames(show)):
-            show_string = sanitizeSceneName(show_name).replace('.','-').lower()
+            show_string = sanitizeSceneName(show_name).replace('.', '-').lower()
             search_string.append(show_string)

         return search_string

@@ -71,15 +71,15 @@ class DTTProvider(generic.TorrentProvider):

     def _doSearch(self, search_params, show=None):

         # show_id = self._dtt_show_id(show.name)

-        params = {"items" : "all"}
+        params = {"items": "all"}

         if sickbeard.DTT_NORAR:
-            params.update({"norar" : "yes"})
+            params.update({"norar": "yes"})

         if sickbeard.DTT_SINGLE:
-            params.update({"single" : "yes"})
+            params.update({"single": "yes"})

         searchURL = self.url + "rss/show/" + search_params + "?" + urllib.urlencode(params)

@@ -94,8 +94,8 @@ class DTTProvider(generic.TorrentProvider):
             parsedXML = parseString(data)
             items = parsedXML.getElementsByTagName('item')
         except Exception, e:
-            logger.log(u"Error trying to load DTT RSS feed: "+ex(e), logger.ERROR)
-            logger.log(u"RSS data: "+data, logger.DEBUG)
+            logger.log(u"Error trying to load DTT RSS feed: " + ex(e), logger.ERROR)
+            logger.log(u"RSS data: " + data, logger.DEBUG)
             return []

         results = []

@@ -114,8 +114,8 @@ class DTTProvider(generic.TorrentProvider):

         return (title, url)

-class DTTCache(tvcache.TVCache):
-
+
+class DTTCache(tvcache.TVCache):
     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)

@@ -124,22 +124,23 @@ class DTTCache(tvcache.TVCache):

     def _getRSSData(self):

-        params = {"items" : "all"}
+        params = {"items": "all"}

         if sickbeard.DTT_NORAR:
-            params.update({"norar" : "yes"})
+            params.update({"norar": "yes"})

         if sickbeard.DTT_SINGLE:
-            params.update({"single" : "yes"})
+            params.update({"single": "yes"})

         url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params)
-        logger.log(u"DTT cache update URL: "+ url, logger.DEBUG)
+        logger.log(u"DTT cache update URL: " + url, logger.DEBUG)
         data = self.provider.getURL(url)
         return data

     def _parseItem(self, item):
         title, url = self.provider._get_title_and_url(item)
-        logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG)
+        logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
         return self._addCacheEntry(title, url)


 provider = DTTProvider()
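_doSearch and _getRSSData above build the same DTT query string from the same two settings. A standalone sketch of that construction (Python 2 urllib, matching the codebase; the helper name is mine):

    import urllib

    def build_dtt_url(base_url, path, norar=False, single=False):
        # Same params dict as the hunks above: "norar"/"single" map the
        # DTT_NORAR/DTT_SINGLE settings onto DTT's RSS query flags.
        params = {"items": "all"}
        if norar:
            params["norar"] = "yes"
        if single:
            params["single"] = "yes"
        return base_url + path + "?" + urllib.urlencode(params)

    # e.g. build_dtt_url(provider.url, "rss/allshows", norar=True)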
|
@ -18,6 +18,7 @@
|
||||||
|
|
||||||
import urllib
|
import urllib
|
||||||
import re
|
import re
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import xml.etree.cElementTree as etree
|
import xml.etree.cElementTree as etree
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
@ -33,7 +34,6 @@ from sickbeard import helpers
|
||||||
|
|
||||||
|
|
||||||
class EZRSSProvider(generic.TorrentProvider):
|
class EZRSSProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
|
||||||
generic.TorrentProvider.__init__(self, "EZRSS")
|
generic.TorrentProvider.__init__(self, "EZRSS")
|
||||||
|
@ -52,7 +52,8 @@ class EZRSSProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
def getQuality(self, item):
|
def getQuality(self, item):
|
||||||
|
|
||||||
filename = helpers.get_xml_text(item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
filename = helpers.get_xml_text(
|
||||||
|
item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
||||||
quality = Quality.nameQuality(filename)
|
quality = Quality.nameQuality(filename)
|
||||||
|
|
||||||
return quality
|
return quality
|
||||||
|
@ -62,7 +63,8 @@ class EZRSSProvider(generic.TorrentProvider):
|
||||||
results = {}
|
results = {}
|
||||||
|
|
||||||
if show.air_by_date:
|
if show.air_by_date:
|
||||||
logger.log(self.name + u" doesn't support air-by-date backlog because of limitations on their RSS search.", logger.WARNING)
|
logger.log(self.name + u" doesn't support air-by-date backlog because of limitations on their RSS search.",
|
||||||
|
logger.WARNING)
|
||||||
return results
|
return results
|
||||||
|
|
||||||
results = generic.TorrentProvider.findSeasonResults(self, show, season)
|
results = generic.TorrentProvider.findSeasonResults(self, show, season)
|
||||||
|
@ -134,14 +136,17 @@ class EZRSSProvider(generic.TorrentProvider):
|
||||||
logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG)
|
logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG)
|
||||||
results.append(curItem)
|
results.append(curItem)
|
||||||
else:
|
else:
|
||||||
logger.log(u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", logger.ERROR)
|
logger.log(
|
||||||
|
u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable",
|
||||||
|
logger.ERROR)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def _get_title_and_url(self, item):
|
def _get_title_and_url(self, item):
|
||||||
(title, url) = generic.TorrentProvider._get_title_and_url(self, item)
|
(title, url) = generic.TorrentProvider._get_title_and_url(self, item)
|
||||||
|
|
||||||
filename = helpers.get_xml_text(item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
filename = helpers.get_xml_text(
|
||||||
|
item.find('{http://xmlns.ezrss.it/0.1/}torrent/{http://xmlns.ezrss.it/0.1/}fileName'))
|
||||||
|
|
||||||
if filename:
|
if filename:
|
||||||
new_title = self._extract_name_from_filename(filename)
|
new_title = self._extract_name_from_filename(filename)
|
||||||
|
@ -161,7 +166,6 @@ class EZRSSProvider(generic.TorrentProvider):
|
||||||
|
|
||||||
|
|
||||||
class EZRSSCache(tvcache.TVCache):
|
class EZRSSCache(tvcache.TVCache):
|
||||||
|
|
||||||
def __init__(self, provider):
|
def __init__(self, provider):
|
||||||
|
|
||||||
tvcache.TVCache.__init__(self, provider)
|
tvcache.TVCache.__init__(self, provider)
|
||||||
|
@ -192,7 +196,10 @@ class EZRSSCache(tvcache.TVCache):
|
||||||
return self._addCacheEntry(title, url)
|
return self._addCacheEntry(title, url)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
logger.log(u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", logger.ERROR)
|
logger.log(
|
||||||
|
u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
|
||||||
|
logger.ERROR)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
provider = EZRSSProvider()
|
provider = EZRSSProvider()
|
||||||
|
|
|
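The long item.find paths wrapped above are ElementTree's fully qualified '{namespace}tag' syntax, needed because EZRSS puts its torrent metadata in a custom XML namespace. A compact sketch; the helper name is mine:

    import xml.etree.ElementTree as etree

    EZRSS_NS = '{http://xmlns.ezrss.it/0.1/}'

    def filename_from_item(item_element):
        # Namespaced children must be addressed with the '{uri}tag' form,
        # which is why the find() calls above spell the namespace out twice.
        node = item_element.find(EZRSS_NS + 'torrent/' + EZRSS_NS + 'fileName')
        return node.text if node is not None else None

    # e.g. filename_from_item(etree.fromstring(item_xml)) for one <item> element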
@@ -31,7 +31,7 @@ import collections
 import sickbeard

 from sickbeard import helpers, classes, logger, db
-from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT#, SEED_POLICY_TIME, SEED_POLICY_RATIO
+from sickbeard.common import Quality, MULTI_EP_RESULT, SEASON_RESULT #, SEED_POLICY_TIME, SEED_POLICY_RATIO
 from sickbeard import tvcache
 from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex

@@ -40,8 +40,8 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException
 from sickbeard import scene_numbering
 from sickbeard.common import Quality, Overview

-class GenericProvider:
-
+
+class GenericProvider:
     NZB = "nzb"
     TORRENT = "torrent"

@@ -121,7 +121,7 @@ class GenericProvider:
         Save the result to disk.
         """

-        logger.log(u"Downloading a result from " + self.name+" at " + result.url)
+        logger.log(u"Downloading a result from " + self.name + " at " + result.url)

         data = self.getURL(result.url)

@@ -229,7 +229,7 @@ class GenericProvider:
         sceneEpisode.convertToSceneNumbering()

         logger.log(u'Searching "%s" for "%s" as "%s"'
-                   % (self.name, episode.prettyName() , sceneEpisode.prettyName()))
+                   % (self.name, episode.prettyName(), sceneEpisode.prettyName()))

         self.cache.updateCache()
         results = self.cache.searchCache(episode, manualSearch)

@@ -261,16 +261,20 @@ class GenericProvider:

             if episode.show.air_by_date:
                 if parse_result.air_date != episode.airdate:
-                    logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
+                    logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it",
+                               logger.DEBUG)
                     continue
             elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
-                logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
+                logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(
+                    episode.episode) + ", skipping it", logger.DEBUG)
                 continue

             quality = self.getQuality(item)

             if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
-                logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
+                logger.log(
+                    u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
+                        quality], logger.DEBUG)
                 continue

             logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

@@ -298,12 +302,12 @@ class GenericProvider:
             seasonEp = show.getAllEpisodes(season)
             wantedEp = [x for x in seasonEp if show.getOverview(x.status) in (Overview.WANTED, Overview.QUAL)]
             map(lambda x: x.convertToSceneNumbering(), wantedEp)
-            for x in wantedEp: sceneSeasons.setdefault(x.season,[]).append(x)
+            for x in wantedEp: sceneSeasons.setdefault(x.season, []).append(x)

             if wantedEp == seasonEp and not show.air_by_date:
                 searchSeason = True

-        for sceneSeason,sceneEpisodes in sceneSeasons.iteritems():
+        for sceneSeason, sceneEpisodes in sceneSeasons.iteritems():
             for curString in self._get_season_search_strings(show, str(sceneSeason), sceneEpisodes, searchSeason):
                 itemList += self._doSearch(curString)

@@ -323,8 +327,10 @@ class GenericProvider:

             if not show.air_by_date:
                 # this check is meaningless for non-season searches
-                if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
-                    logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(season) + ", ignoring", logger.DEBUG)
+                if (parse_result.season_number != None and parse_result.season_number != season) or (
+                        parse_result.season_number == None and season != 1):
+                    logger.log(u"The result " + title + " doesn't seem to be a valid episode for season " + str(
+                        season) + ", ignoring", logger.DEBUG)
                     continue

                 # we just use the existing info for normal searches

@@ -333,14 +339,19 @@ class GenericProvider:

             else:
                 if not parse_result.air_by_date:
-                    logger.log(u"This is supposed to be an air-by-date search but the result "+title+" didn't parse as one, skipping it", logger.DEBUG)
+                    logger.log(
+                        u"This is supposed to be an air-by-date search but the result " + title + " didn't parse as one, skipping it",
+                        logger.DEBUG)
                     continue

                 myDB = db.DBConnection()
-                sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?", [show.indexerid, parse_result.air_date.toordinal()])
+                sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
+                                          [show.indexerid, parse_result.air_date.toordinal()])

                 if len(sql_results) != 1:
-                    logger.log(u"Tried to look up the date for the episode "+title+" but the database didn't give proper results, skipping it", logger.WARNING)
+                    logger.log(
+                        u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
+                        logger.WARNING)
                     continue

                 actual_season = int(sql_results[0]["season"])
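The air-by-date lookup above works because tv_episodes stores airdate as a proleptic-Gregorian ordinal; the parsed date is converted the same way before being bound to the "airdate = ?" placeholder. A tiny sketch of that round trip:

    import datetime

    # tv_episodes stores dates as toordinal() integers, so a parsed air
    # date becomes a plain int key for the SELECT above.
    air_date = datetime.date(2013, 6, 1)
    airdate_key = air_date.toordinal()  # 735020
    assert datetime.date.fromordinal(airdate_key) == air_date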
@@ -354,7 +365,9 @@ class GenericProvider:
                     break

             if not wantEp:
-                logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
+                logger.log(
+                    u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
+                        quality], logger.DEBUG)
                 continue

             logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

@@ -375,7 +388,8 @@ class GenericProvider:
                 epNum = epObj[0].episode
             elif len(epObj) > 1:
                 epNum = MULTI_EP_RESULT
-                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(parse_result.episode_numbers), logger.DEBUG)
+                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
+                    parse_result.episode_numbers), logger.DEBUG)
             elif len(epObj) == 0:
                 epNum = SEASON_RESULT
                 result.extraInfo = [show]

@@ -396,17 +410,14 @@ class GenericProvider:


 class NZBProvider(GenericProvider):
-
     def __init__(self, name):
-
         GenericProvider.__init__(self, name)
-
         self.providerType = GenericProvider.NZB


 class TorrentProvider(GenericProvider):
     def __init__(self, name):
         GenericProvider.__init__(self, name)
         self.providerType = GenericProvider.TORRENT
@@ -30,7 +30,6 @@ except ImportError:


 class HDBitsProvider(generic.TorrentProvider):
-
     def __init__(self):

         generic.TorrentProvider.__init__(self, "HDBits")

@@ -61,8 +60,10 @@ class HDBitsProvider(generic.TorrentProvider):

         if 'status' in parsedJSON and 'message' in parsedJSON:
             if parsedJSON.get('status') == 5:
-                logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'], logger.DEBUG)
-                raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.")
+                logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'],
+                           logger.DEBUG)
+                raise AuthException(
+                    "Your authentication credentials for " + self.name + " are incorrect, check your config.")

         return True

@@ -115,16 +116,19 @@ class HDBitsProvider(generic.TorrentProvider):

             if episode.show.air_by_date:
                 if parse_result.air_date != episode.airdate:
-                    logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
+                    logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it",
+                               logger.DEBUG)
                     continue
             elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
-                logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
+                logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(
+                    episode.episode) + ", skipping it", logger.DEBUG)
                 continue

             quality = self.getQuality(item)

             if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
-                logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
+                logger.log(u"Ignoring result " + title + " because we don't want an episode that is " +
+                           Quality.qualityStrings[quality], logger.DEBUG)
                 continue

             logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

@@ -170,7 +174,6 @@ class HDBitsProvider(generic.TorrentProvider):


 class HDBitsCache(tvcache.TVCache):
-
     def __init__(self, provider):

         tvcache.TVCache.__init__(self, provider)

@@ -206,7 +209,8 @@ class HDBitsCache(tvcache.TVCache):
             if parsedJSON and 'data' in parsedJSON:
                 items = parsedJSON['data']
             else:
-                logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", logger.ERROR)
+                logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it",
+                           logger.ERROR)
                 return []

             cl = []

@@ -220,7 +224,8 @@ class HDBitsCache(tvcache.TVCache):
                 myDB.mass_action(cl)

             else:
-                raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")
+                raise exceptions.AuthException(
+                    "Your authentication info for " + self.provider.name + " is incorrect, check your config")

         else:
             return []

@@ -236,10 +241,12 @@ class HDBitsCache(tvcache.TVCache):
             logger.log(u"Adding item to results: " + title, logger.DEBUG)
             return self._addCacheEntry(title, url)
         else:
-            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", logger.ERROR)
+            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
+                       logger.ERROR)
             return None

     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)


 provider = HDBitsProvider()
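HDBits signals bad credentials in-band: a JSON body carrying status 5 and a message, rather than an HTTP error. A sketch of that check outside the provider class, with a stock exception standing in for sickbeard's AuthException:

    def check_hdbits_response(parsed_json, provider_name='HDBits'):
        # Mirrors the status-5 test in the hunk above: that status in the
        # JSON payload means bad credentials; anything else passes.
        if 'status' in parsed_json and 'message' in parsed_json:
            if parsed_json.get('status') == 5:
                raise RuntimeError("Your authentication credentials for "
                                   + provider_name + " are incorrect, check your config.")
        return True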
@@ -37,14 +37,14 @@ from lib import requests
 from bs4 import BeautifulSoup
 from lib.unidecode import unidecode

-class HDTorrentsProvider(generic.TorrentProvider):
-
-    urls = {'base_url' : 'https://hdts.ru/index.php',
-            'login' : 'https://hdts.ru/login.php',
-            'detail' : 'https://www.hdts.ru/details.php?id=%s',
-            'search' : 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s',
-            'download' : 'https://www.sceneaccess.eu/%s',
-            'home' : 'https://www.hdts.ru/%s'
+
+class HDTorrentsProvider(generic.TorrentProvider):
+    urls = {'base_url': 'https://hdts.ru/index.php',
+            'login': 'https://hdts.ru/login.php',
+            'detail': 'https://www.hdts.ru/details.php?id=%s',
+            'search': 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s',
+            'download': 'https://www.sceneaccess.eu/%s',
+            'home': 'https://www.hdts.ru/%s'
             }

     def __init__(self):

@@ -121,7 +121,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
         if searchSeason:
             search_string = {'Season': [], 'Episode': []}
             for show_name in set(show_name_helpers.allPossibleShowNames(show)):
-                ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX
+                ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX
                 search_string['Season'].append(ep_string)

             for ep_obj in wantedEp:

@@ -141,14 +141,15 @@ class HDTorrentsProvider(generic.TorrentProvider):

         if ep_obj.show.air_by_date:
             for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-                            str(ep_obj.airdate) +'|'+\
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+                            str(ep_obj.airdate) + '|' + \
                             helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
                 search_string['Episode'].append(ep_string)
         else:
             for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-                ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
+                ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
+                                                                  'episodenumber': ep_obj.episode}

                 search_string['Episode'].append(re.sub('\s+', ' ', ep_string))

@@ -170,7 +171,7 @@ class HDTorrentsProvider(generic.TorrentProvider):

                 if search_string == '':
                     continue
-                search_string = str(search_string).replace('.',' ')
+                search_string = str(search_string).replace('.', ' ')
                 searchURL = self.urls['search'] % (search_string, self.categories)

                 logger.log(u"Search string: " + searchURL, logger.DEBUG)

@@ -189,10 +190,11 @@ class HDTorrentsProvider(generic.TorrentProvider):
                 html = BeautifulSoup(data, features=["html5lib", "permissive"])

                 #Get first entry in table
-                entries = html.find_all('td', attrs={'align' : 'center'})
+                entries = html.find_all('td', attrs={'align': 'center'})

                 if not entries:
-                    logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
+                    logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+                               logger.DEBUG)
                     continue

                 try:

@@ -217,12 +219,12 @@ class HDTorrentsProvider(generic.TorrentProvider):
                     items[mode].append(item)

                 #Now attempt to get any others
-                result_table = html.find('table', attrs = {'class' : 'mainblockcontenttt'})
+                result_table = html.find('table', attrs={'class': 'mainblockcontenttt'})

                 if not result_table:
                     continue

-                entries = result_table.find_all('td', attrs={'align' : 'center', 'class' : 'listas'})
+                entries = result_table.find_all('td', attrs={'align': 'center', 'class': 'listas'})

                 if not entries:
                     continue

@@ -270,7 +272,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
         title, url, id, seeders, leechers = item

         if url:
-            url = str(url).replace('&','&')
+            url = str(url).replace('&', '&')

         return (title, url)

@@ -288,11 +290,12 @@ class HDTorrentsProvider(generic.TorrentProvider):
             url = urlparse.urlunparse(parsed)
             response = self.session.get(url, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
-            logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR)
+            logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
             return None

         if response.status_code != 200:
-            logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
+            logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
+                response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
             return None

         return response.content

@@ -301,7 +304,8 @@ class HDTorrentsProvider(generic.TorrentProvider):

         results = []

-        sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+        sqlResults = db.DBConnection().select(
+            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
             ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
             ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
             ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +

@@ -323,7 +327,6 @@ class HDTorrentsProvider(generic.TorrentProvider):


 class HDTorrentsCache(tvcache.TVCache):
-
     def __init__(self, provider):

         tvcache.TVCache.__init__(self, provider)

@@ -369,4 +372,5 @@ class HDTorrentsCache(tvcache.TVCache):

         return self._addCacheEntry(title, url)

+
 provider = HDTorrentsProvider()
@@ -36,11 +36,11 @@ from lib import requests
from bs4 import BeautifulSoup
from lib.unidecode import unidecode

-class IPTorrentsProvider(generic.TorrentProvider):

+class IPTorrentsProvider(generic.TorrentProvider):
-urls = {'base_url' : 'https://www.iptorrents.com',
-'login' : 'https://www.iptorrents.com/torrents/',
-'search' : 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti',
+urls = {'base_url': 'https://www.iptorrents.com',
+'login': 'https://www.iptorrents.com/torrents/',
+'search': 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti',
}

def __init__(self):

@@ -102,7 +102,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
if searchSeason:
search_string = {'Season': [], 'Episode': []}
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
-ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX
+ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX
search_string['Season'].append(ep_string)

for ep_obj in wantedEp:

@@ -122,14 +122,15 @@ class IPTorrentsProvider(generic.TorrentProvider):

if ep_obj.show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-str(ep_obj.airdate) +'|'+\
+ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+str(ep_obj.airdate) + '|' + \
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
search_string['Episode'].append(ep_string)
else:
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ' %s' %add_string
+ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
+    'episodenumber': ep_obj.episode} + ' %s' % add_string

search_string['Episode'].append(re.sub('\s+', ' ', ep_string))

@@ -162,19 +163,20 @@ class IPTorrentsProvider(generic.TorrentProvider):
html = BeautifulSoup(data, features=["html5lib", "permissive"])

if not html:
-logger.log(u"Invalid HTML data: " + str(data) , logger.DEBUG)
+logger.log(u"Invalid HTML data: " + str(data), logger.DEBUG)
continue

if html.find(text='No Torrents Found!'):
logger.log(u"No results found for: " + search_string + " (" + searchURL + ")", logger.DEBUG)
continue

-torrent_table = html.find('table', attrs = {'class' : 'torrents'})
+torrent_table = html.find('table', attrs={'class': 'torrents'})
torrents = torrent_table.find_all('tr') if torrent_table else []

#Continue only if one Release is found
-if len(torrents)<2:
+if len(torrents) < 2:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING)
+logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+    logger.WARNING)
continue

for result in torrents[1:]:

@@ -184,7 +186,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
torrent_name = torrent.string
torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href']
torrent_details_url = self.urls['base_url'] + torrent['href']
-torrent_seeders = int(result.find('td', attrs = {'class' : 'ac t_seeders'}).string)
+torrent_seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).string)
## Not used, perhaps in the future ##
#torrent_id = int(torrent['href'].replace('/details.php?id=', ''))
#torrent_leechers = int(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)

@@ -214,7 +216,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
title, url = item

if url:
-url = str(url).replace('&amp;','&')
+url = str(url).replace('&amp;', '&')

return (title, url)

@@ -236,7 +238,8 @@ class IPTorrentsProvider(generic.TorrentProvider):
return None

if response.status_code != 200:
-logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
+logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
+    response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
return None

return response.content

@@ -245,7 +248,8 @@ class IPTorrentsProvider(generic.TorrentProvider):

results = []

-sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+sqlResults = db.DBConnection().select(
+    'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +

@@ -267,7 +271,6 @@ class IPTorrentsProvider(generic.TorrentProvider):


class IPTorrentsCache(tvcache.TVCache):

def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

@@ -313,4 +316,5 @@ class IPTorrentsCache(tvcache.TVCache):

return self._addCacheEntry(title, url)


provider = IPTorrentsProvider()
@@ -45,8 +45,8 @@ from lib import requests
from bs4 import BeautifulSoup
from lib.unidecode import unidecode

-class KATProvider(generic.TorrentProvider):

+class KATProvider(generic.TorrentProvider):
def __init__(self):

generic.TorrentProvider.__init__(self, "KickAssTorrents")

@@ -57,7 +57,7 @@ class KATProvider(generic.TorrentProvider):

self.url = 'http://kickass.to/'

-self.searchurl = self.url+'usearch/%s/?field=seeders&sorder=desc' #order by seed
+self.searchurl = self.url + 'usearch/%s/?field=seeders&sorder=desc' #order by seed

def isEnabled(self):
return sickbeard.KAT

@@ -95,7 +95,7 @@ class KATProvider(generic.TorrentProvider):

return quality_string

-def _find_season_quality(self,title, torrent_link, ep_number):
+def _find_season_quality(self, title, torrent_link, ep_number):
""" Return the modified title of a Season Torrent with the quality found inspecting torrent file list """

mediaExtensions = ['avi', 'mkv', 'wmv', 'divx',

@@ -113,18 +113,21 @@ class KATProvider(generic.TorrentProvider):

try:
soup = BeautifulSoup(data, features=["html5lib", "permissive"])
-file_table = soup.find('table', attrs = {'class': 'torrentFileList'})
+file_table = soup.find('table', attrs={'class': 'torrentFileList'})

if not file_table:
return None

-files = [x.text for x in file_table.find_all('td', attrs = {'class' : 'torFileName'} )]
+files = [x.text for x in file_table.find_all('td', attrs={'class': 'torFileName'})]
videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, files)

#Filtering SingleEpisode/MultiSeason Torrent
-if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1 ):
+if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
-logger.log(u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG)
-logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG)
+logger.log(u"Result " + title + " have " + str(
+    ep_number) + " episode and episodes retrived in torrent are " + str(len(videoFiles)), logger.DEBUG)
+logger.log(
+    u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...",
+    logger.DEBUG)
return None

if Quality.sceneQuality(title) != Quality.UNKNOWN:

@@ -134,7 +137,7 @@ class KATProvider(generic.TorrentProvider):
quality = Quality.sceneQuality(os.path.basename(fileName))
if quality != Quality.UNKNOWN: break

-if fileName!=None and quality == Quality.UNKNOWN:
+if fileName != None and quality == Quality.UNKNOWN:
quality = Quality.assumeQuality(os.path.basename(fileName))

if quality == Quality.UNKNOWN:

@@ -147,10 +150,11 @@ class KATProvider(generic.TorrentProvider):
except InvalidNameException:
return None

-logger.log(u"Season quality for "+title+" is "+Quality.qualityStrings[quality], logger.DEBUG)
+logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)

if parse_result.series_name and parse_result.season_number:
-title = parse_result.series_name+' S%02d' % int(parse_result.season_number)+' '+self._reverseQuality(quality)
+title = parse_result.series_name + ' S%02d' % int(
+    parse_result.season_number) + ' ' + self._reverseQuality(quality)

return title

@@ -169,10 +173,11 @@ class KATProvider(generic.TorrentProvider):
if searchSeason:
search_string = {'Season': [], 'Episode': []}
for show_name in set(allPossibleShowNames(show)):
-ep_string = show_name +' S%02d' % int(season) + ' -S%02d' % int(season) + 'E' + ' category:tv' #1) ShowName SXX -SXXE
+ep_string = show_name + ' S%02d' % int(season) + ' -S%02d' % int(
+    season) + 'E' + ' category:tv' #1) ShowName SXX -SXXE
search_string['Season'].append(ep_string)

-ep_string = show_name+' Season '+str(season)+' -Ep*' + ' category:tv' #2) ShowName Season X
+ep_string = show_name + ' Season ' + str(season) + ' -Ep*' + ' category:tv' #2) ShowName Season X
search_string['Season'].append(ep_string)

for ep_obj in wantedEp:

@@ -194,18 +199,20 @@ class KATProvider(generic.TorrentProvider):

if ep_obj.show.air_by_date:
for show_name in set(allPossibleShowNames(ep_obj.show)):
-ep_string = sanitizeSceneName(show_name) +' '+\
-str(ep_obj.airdate) +'|'+\
+ep_string = sanitizeSceneName(show_name) + ' ' + \
+str(ep_obj.airdate) + '|' + \
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)

search_string['Episode'].append(ep_string)
else:
for show_name in set(allPossibleShowNames(ep_obj.show)):
-ep_string = sanitizeSceneName(show_name) +' '+\
-sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
-sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} +'|'+\
-sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode} + ' %s category:tv' %add_string \
+ep_string = sanitizeSceneName(show_name) + ' ' + \
+sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
+    'episodenumber': ep_obj.episode} + '|' + \
+sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.season,
+    'episodenumber': ep_obj.episode} + '|' + \
+sickbeard.config.naming_ep_type[3] % {'seasonnumber': ep_obj.season,
+    'episodenumber': ep_obj.episode} + ' %s category:tv' % add_string
search_string['Episode'].append(re.sub('\s+', ' ', ep_string))

return [search_string]

@@ -220,11 +227,11 @@ class KATProvider(generic.TorrentProvider):
for search_string in search_params[mode]:

if mode != 'RSS':
-searchURL = self.searchurl %(urllib.quote(unidecode(search_string)))
+searchURL = self.searchurl % (urllib.quote(unidecode(search_string)))
logger.log(u"Search string: " + searchURL, logger.DEBUG)
else:
searchURL = self.url + 'tv/?field=time_add&sorder=desc'
-logger.log(u"KAT cache update URL: "+ searchURL, logger.DEBUG)
+logger.log(u"KAT cache update URL: " + searchURL, logger.DEBUG)

html = self.getURL(searchURL)
if not html:

@@ -233,12 +240,13 @@ class KATProvider(generic.TorrentProvider):
try:
soup = BeautifulSoup(html, features=["html5lib", "permissive"])

-torrent_table = soup.find('table', attrs = {'class' : 'data'})
+torrent_table = soup.find('table', attrs={'class': 'data'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else []

#Continue only if one Release is found
-if len(torrent_rows)<2:
+if len(torrent_rows) < 2:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING)
+logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+    logger.WARNING)
continue

for tr in torrent_rows[1:]:

@@ -259,7 +267,9 @@ class KATProvider(generic.TorrentProvider):
continue

if sickbeard.KAT_VERIFIED and not verified:
-logger.log(u"KAT Provider found result "+title+" but that doesn't seem like a verified result so I'm ignoring it",logger.DEBUG)
+logger.log(
+    u"KAT Provider found result " + title + " but that doesn't seem like a verified result so I'm ignoring it",
+    logger.DEBUG)
continue

#Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent

@@ -275,7 +285,8 @@ class KATProvider(generic.TorrentProvider):
items[mode].append(item)

except Exception, e:
-logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
+    logger.ERROR)

#For each search mode sort all the items by seeders
items[mode].sort(key=lambda tup: tup[3], reverse=True)

@@ -289,7 +300,7 @@ class KATProvider(generic.TorrentProvider):
title, url, id, seeders, leechers = item

if url:
-url = url.replace('&amp;','&')
+url = url.replace('&amp;', '&')

return (title, url)

@@ -303,11 +314,12 @@ class KATProvider(generic.TorrentProvider):

r = requests.get(url)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
-logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
+logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
return None

if r.status_code != 200:
-logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
+logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
+    r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
return None

return r.content

@@ -332,7 +344,8 @@ class KATProvider(generic.TorrentProvider):
if not r.status_code == 200:
return False

-magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
+magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
+    helpers.sanitizeFileName(result.name) + '.' + self.providerType)
magnetFileContent = r.content

try:

@@ -353,7 +366,8 @@ class KATProvider(generic.TorrentProvider):

results = []

-sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
+sqlResults = db.DBConnection().select(
+    'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate, s.indexer FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +

@@ -375,7 +389,6 @@ class KATProvider(generic.TorrentProvider):


class KATCache(tvcache.TVCache):

def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

@@ -421,4 +434,5 @@ class KATCache(tvcache.TVCache):

return self._addCacheEntry(title, url)


provider = KATProvider()
@@ -20,7 +20,7 @@ import os
import re
import sys
import time
-import urllib,urlparse
+import urllib, urlparse

from xml.dom.minidom import parseString
from datetime import datetime, timedelta

@@ -35,8 +35,8 @@ from sickbeard.common import Quality
from sickbeard.exceptions import ex
from lib.dateutil.parser import parse as parseDate

-class NewzbinDownloader(urllib.FancyURLopener):

+class NewzbinDownloader(urllib.FancyURLopener):
def __init__(self):
urllib.FancyURLopener.__init__(self)

@@ -63,8 +63,8 @@ class NewzbinDownloader(urllib.FancyURLopener):

raise exceptions.NewzbinAPIThrottled()

-class NewzbinProvider(generic.NZBProvider):

+class NewzbinProvider(generic.NZBProvider):
def __init__(self):

generic.NZBProvider.__init__(self, "Newzbin")

@@ -92,7 +92,7 @@ class NewzbinProvider(generic.NZBProvider):
else:
attr_dict[cur_attr].append(cur_attr_value)

-logger.log("Finding quality of item based on attributes "+str(attr_dict), logger.DEBUG)
+logger.log("Finding quality of item based on attributes " + str(attr_dict), logger.DEBUG)

if self._is_SDTV(attr_dict):
quality = Quality.SDTV

@@ -109,14 +109,15 @@ class NewzbinProvider(generic.NZBProvider):
else:
quality = Quality.UNKNOWN

-logger.log("Resulting quality: "+str(quality), logger.DEBUG)
+logger.log("Resulting quality: " + str(quality), logger.DEBUG)

return quality

def _is_SDTV(self, attrs):

# Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
-video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
+video_fmt = 'Video Fmt' in attrs and (
+    'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
and ('720p' not in attrs['Video Fmt']) \
and ('1080p' not in attrs['Video Fmt']) \
and ('1080i' not in attrs['Video Fmt'])

@@ -132,7 +133,8 @@ class NewzbinProvider(generic.NZBProvider):
def _is_SDDVD(self, attrs):

# Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
-video_fmt = 'Video Fmt' in attrs and ('XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
+video_fmt = 'Video Fmt' in attrs and (
+    'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
and ('720p' not in attrs['Video Fmt']) \
and ('1080p' not in attrs['Video Fmt']) \
and ('1080i' not in attrs['Video Fmt'])

@@ -207,19 +209,20 @@ class NewzbinProvider(generic.NZBProvider):

id = self.getIDFromURL(nzb.url)
if not id:
-logger.log("Unable to get an ID from "+str(nzb.url)+", can't download from Newzbin's API", logger.ERROR)
+logger.log("Unable to get an ID from " + str(nzb.url) + ", can't download from Newzbin's API", logger.ERROR)
return False

-logger.log("Downloading an NZB from newzbin with id "+id)
+logger.log("Downloading an NZB from newzbin with id " + id)

-fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name)+'.nzb')
+fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name) + '.nzb')
logger.log("Saving to " + fileName)

urllib._urlopener = NewzbinDownloader()

-params = urllib.urlencode({"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id})
+params = urllib.urlencode(
+    {"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id})
try:
-urllib.urlretrieve(self.url+"api/dnzb/", fileName, data=params)
+urllib.urlretrieve(self.url + "api/dnzb/", fileName, data=params)
except exceptions.NewzbinAPIThrottled:
logger.log("Done waiting for Newzbin API throttle limit, starting downloads again")
self.downloadResult(nzb)

@@ -256,13 +259,13 @@ class NewzbinProvider(generic.NZBProvider):
suffix = ''
else:
suffix = 'x'
-searchTerms = ['^"'+x+' - '+str(season)+suffix+'"' for x in nameList]
+searchTerms = ['^"' + x + ' - ' + str(season) + suffix + '"' for x in nameList]
#searchTerms += ['^"'+x+' - Season '+str(season)+'"' for x in nameList]
searchStr = " OR ".join(searchTerms)

searchStr += " -subpack -extras"

-logger.log("Searching newzbin for string "+searchStr, logger.DEBUG)
+logger.log("Searching newzbin for string " + searchStr, logger.DEBUG)

return [searchStr]

@@ -270,9 +273,9 @@ class NewzbinProvider(generic.NZBProvider):

nameList = set(show_name_helpers.allPossibleShowNames(ep_obj.show))
if not ep_obj.show.air_by_date:
-searchStr = " OR ".join(['^"'+x+' - %dx%02d"'%(ep_obj.season, ep_obj.episode) for x in nameList])
+searchStr = " OR ".join(['^"' + x + ' - %dx%02d"' % (ep_obj.season, ep_obj.episode) for x in nameList])
else:
-searchStr = " OR ".join(['^"'+x+' - '+str(ep_obj.airdate)+'"' for x in nameList])
+searchStr = " OR ".join(['^"' + x + ' - ' + str(ep_obj.airdate) + '"' for x in nameList])
return [searchStr]

def _doSearch(self, searchStr, show=None):

@@ -285,7 +288,7 @@ class NewzbinProvider(generic.NZBProvider):
parsedXML = parseString(data)
items = parsedXML.getElementsByTagName('item')
except Exception, e:
-logger.log("Error trying to load Newzbin RSS feed: "+ex(e), logger.ERROR)
+logger.log("Error trying to load Newzbin RSS feed: " + ex(e), logger.ERROR)
return []

for cur_item in items:

@@ -301,7 +304,7 @@ class NewzbinProvider(generic.NZBProvider):
post_date = parseDate(dateString).replace(tzinfo=None)
retention_date = datetime.now() - timedelta(days=sickbeard.USENET_RETENTION)
if post_date < retention_date:
-logger.log(u"Date "+str(post_date)+" is out of retention range, skipping", logger.DEBUG)
+logger.log(u"Date " + str(post_date) + " is out of retention range, skipping", logger.DEBUG)
continue
except Exception, e:
logger.log("Error parsing date from Newzbin RSS feed: " + str(e), logger.ERROR)

@@ -350,8 +353,8 @@ class NewzbinProvider(generic.NZBProvider):
if sickbeard.NEWZBIN_USERNAME in (None, "") or sickbeard.NEWZBIN_PASSWORD in (None, ""):
raise exceptions.AuthException("Newzbin authentication details are empty, check your config")

-class NewzbinCache(tvcache.TVCache):

+class NewzbinCache(tvcache.TVCache):
def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

@@ -374,14 +377,16 @@ class NewzbinCache(tvcache.TVCache):
raise exceptions.AuthException("Invalid Newzbin username/password")

if not title or not url:
-logger.log("The XML returned from the "+self.provider.name+" feed is incomplete, this result is unusable", logger.ERROR)
+logger.log(
+    "The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
+    logger.ERROR)
return

quality = self.provider.getQuality(item)

-logger.log("Found quality "+str(quality), logger.DEBUG)
+logger.log("Found quality " + str(quality), logger.DEBUG)

-logger.log("Adding item from RSS to cache: "+title, logger.DEBUG)
+logger.log("Adding item from RSS to cache: " + title, logger.DEBUG)

self._addCacheEntry(title, url, quality=quality)
@@ -42,7 +42,6 @@ from sickbeard.exceptions import ex, AuthException


class NewznabProvider(generic.NZBProvider):

def __init__(self, name, url, key='', catIDs='5030,5040,5060'):

generic.NZBProvider.__init__(self, name)

@@ -73,7 +72,8 @@ class NewznabProvider(generic.NZBProvider):
return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str(int(self.enabled))

def imageName(self):
-if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
+if ek.ek(os.path.isfile,
+    ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
return self.getID() + '.png'
return 'newznab.png'

@@ -155,7 +155,8 @@ class NewznabProvider(generic.NZBProvider):
def _checkAuth(self):

if self.needs_auth and not self.key:
-logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing", logger.DEBUG)
+logger.log(u"Incorrect authentication credentials for " + self.name + " : " + "API key is missing",
+    logger.DEBUG)
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")

return True

@@ -173,9 +174,11 @@ class NewznabProvider(generic.NZBProvider):
elif code == '101':
raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.")
elif code == '102':
-raise AuthException("Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
+raise AuthException(
+    "Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
else:
-logger.log(u"Unknown error given from " + self.name + ": " + parsedXML.attrib['description'], logger.ERROR)
+logger.log(u"Unknown error given from " + self.name + ": " + parsedXML.attrib['description'],
+    logger.ERROR)
return False

return True

@@ -237,7 +240,9 @@ class NewznabProvider(generic.NZBProvider):
logger.log(u"Adding item from RSS to results: " + title, logger.DEBUG)
results.append(curItem)
else:
-logger.log(u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable", logger.DEBUG)
+logger.log(
+    u"The XML returned from the " + self.name + " RSS feed is incomplete, this result is unusable",
+    logger.DEBUG)

return results

@@ -248,7 +253,8 @@ class NewznabProvider(generic.NZBProvider):
search_terms = ['.proper.', '.repack.']

cache_results = self.cache.listPropers(search_date)
-results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in cache_results]
+results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time'])) for x in
+    cache_results]

for term in search_terms:
for item in self._doSearch({'q': term}, max_age=4):

@@ -260,7 +266,8 @@ class NewznabProvider(generic.NZBProvider):

try:
# we could probably do dateStr = descriptionStr but we want date in this format
-date_text = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', description_text).group(1)
+date_text = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}',
+    description_text).group(1)
except:
date_text = None

@@ -281,7 +288,6 @@ class NewznabProvider(generic.NZBProvider):


class NewznabCache(tvcache.TVCache):

def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)
@@ -107,11 +107,12 @@ class NextGenProvider(generic.TorrentProvider):
try:
login_params = self.getLoginParams()
self.session = requests.Session()
-self.session.headers.update({'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0)'})
+self.session.headers.update(
+    {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0)'})
data = self.session.get(self.urls['login_page'])
bs = BeautifulSoup(data.content.decode('iso-8859-1'))
-csrfraw = bs.find('form', attrs = {'id': 'login'})['action']
+csrfraw = bs.find('form', attrs={'id': 'login'})['action']
-output = self.session.post(self.urls['base_url']+csrfraw, data=login_params)
+output = self.session.post(self.urls['base_url'] + csrfraw, data=login_params)

if self.loginSuccess(output):
self.last_login_check = now

@@ -158,8 +159,8 @@ class NextGenProvider(generic.TorrentProvider):

if ep_obj.show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
-ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
-str(ep_obj.airdate) +'|'+\
+ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+str(ep_obj.airdate) + '|' + \
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
search_string['Episode'].append(ep_string)
else:

@@ -193,15 +194,16 @@ class NextGenProvider(generic.TorrentProvider):

try:
html = BeautifulSoup(data.decode('iso-8859-1'), features=["html5lib", "permissive"])
-resultsTable = html.find('div', attrs = {'id' : 'torrent-table-wrapper'})
+resultsTable = html.find('div', attrs={'id': 'torrent-table-wrapper'})

if not resultsTable:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
+logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+    logger.DEBUG)
continue

# Collecting entries
-entries_std = html.find_all('div' , attrs = {'id' : 'torrent-std'})
-entries_sticky = html.find_all('div' , attrs = {'id' : 'torrent-sticky'})
+entries_std = html.find_all('div', attrs={'id': 'torrent-std'})
+entries_sticky = html.find_all('div', attrs={'id': 'torrent-sticky'})

entries = entries_std + entries_sticky

@@ -212,8 +214,11 @@ class NextGenProvider(generic.TorrentProvider):
for result in entries:

try:
-torrentName = ((result.find('div', attrs = {'id' :'torrent-udgivelse2-users'})).find('a'))['title']
-torrentId = (((result.find('div', attrs = {'id' :'torrent-download'})).find('a'))['href']).replace('download.php?id=','')
+torrentName = \
+    ((result.find('div', attrs={'id': 'torrent-udgivelse2-users'})).find('a'))['title']
+torrentId = (
+    ((result.find('div', attrs={'id': 'torrent-download'})).find('a'))['href']).replace(
+    'download.php?id=', '')
torrent_name = str(torrentName)
torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
torrent_details_url = (self.urls['detail'] % torrentId).encode('utf8')

@@ -232,16 +237,19 @@ class NextGenProvider(generic.TorrentProvider):
continue

item = torrent_name, torrent_download_url
-logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")", logger.DEBUG)
+logger.log(u"Found result: " + torrent_name + " (" + torrent_details_url + ")",
+    logger.DEBUG)
items[mode].append(item)

else:
-logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.WARNING)
+logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+    logger.WARNING)
continue


except Exception, e:
-logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
+    logger.ERROR)

results += items[mode]

@@ -276,7 +284,8 @@ class NextGenProvider(generic.TorrentProvider):
return None

if response.status_code != 200:
-logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
+logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
+    response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
return None

return response.content

@@ -285,7 +294,8 @@ class NextGenProvider(generic.TorrentProvider):

results = []

-sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+sqlResults = db.DBConnection().select(
+    'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
@@ -36,8 +36,8 @@ from sickbeard import tvcache

REMOTE_DBG = False

-class NyaaProvider(generic.TorrentProvider):

+class NyaaProvider(generic.TorrentProvider):
def __init__(self):

generic.TorrentProvider.__init__(self, "NyaaTorrents")

@@ -58,7 +58,7 @@ class NyaaProvider(generic.TorrentProvider):

def getQuality(self, item, anime=False):
self.debug()
-title = helpers.get_xml_text(item.getElementsByTagName('title')[0]).replace("/"," ")
+title = helpers.get_xml_text(item.getElementsByTagName('title')[0]).replace("/", " ")
quality = Quality.sceneQuality(title, anime)
return quality

@@ -68,6 +68,7 @@ class NyaaProvider(generic.TorrentProvider):
results = generic.TorrentProvider.findSeasonResults(self, show, season)

return results

def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
names = []
names.extend(show_name_helpers.makeSceneShowSearchStrings(show))

@@ -78,11 +79,11 @@ class NyaaProvider(generic.TorrentProvider):

def _doSearch(self, search_string, show=None):

-params = {"term" : search_string.encode('utf-8'),
-"sort" : '2', #Sort Descending By Seeders
+params = {"term": search_string.encode('utf-8'),
+"sort": '2', #Sort Descending By Seeders
}

-searchURL = self.url+'?page=rss&'+urllib.urlencode(params)
+searchURL = self.url + '?page=rss&' + urllib.urlencode(params)

logger.log(u"Search string: " + searchURL, logger.DEBUG)

@@ -95,8 +96,8 @@ class NyaaProvider(generic.TorrentProvider):
parsedXML = parseString(data)
items = parsedXML.getElementsByTagName('item')
except Exception, e:
-logger.log(u"Error trying to load NyaaTorrents RSS feed: "+ex(e), logger.ERROR)
-logger.log(u"RSS data: "+data, logger.DEBUG)
+logger.log(u"Error trying to load NyaaTorrents RSS feed: " + ex(e), logger.ERROR)
+logger.log(u"RSS data: " + data, logger.DEBUG)
return []

results = []

@@ -106,7 +107,9 @@ class NyaaProvider(generic.TorrentProvider):
(title, url) = self._get_title_and_url(curItem)

if not title or not url:
-logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable: "+data, logger.ERROR)
+logger.log(
+    u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable: " + data,
+    logger.ERROR)
continue

results.append(curItem)

@@ -117,15 +120,15 @@ class NyaaProvider(generic.TorrentProvider):

return generic.TorrentProvider._get_title_and_url(self, item)

-def findEpisode (self, episode, manualSearch=False):
+def findEpisode(self, episode, manualSearch=False):

self._checkAuth()

-logger.log(u"Searching "+self.name+" for " + episode.prettyName())
+logger.log(u"Searching " + self.name + " for " + episode.prettyName())

self.cache.updateCache()
results = self.cache.searchCache(episode, manualSearch)
-logger.log(u"Cache results: "+str(results), logger.DEBUG)
+logger.log(u"Cache results: " + str(results), logger.DEBUG)

# if we got some results then use them no matter what.
# OR

@@ -147,25 +150,31 @@ class NyaaProvider(generic.TorrentProvider):
myParser = NameParser(show=episode.show)
parse_result = myParser.parse(title)
except InvalidNameException:
-logger.log(u"Unable to parse the filename "+title+" into a valid episode", logger.WARNING)
+logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
continue

if episode.show.air_by_date:
if parse_result.air_date != episode.airdate:
-logger.log("Episode "+title+" didn't air on "+str(episode.airdate)+", skipping it", logger.DEBUG)
+logger.log("Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it",
+    logger.DEBUG)
continue
elif episode.show.anime and episode.show.absolute_numbering:
if episode.absolute_number not in parse_result.ab_episode_numbers:
-logger.log("Episode "+title+" isn't "+str(episode.absolute_number)+", skipping it", logger.DEBUG)
+logger.log("Episode " + title + " isn't " + str(episode.absolute_number) + ", skipping it",
+    logger.DEBUG)
continue
elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
-logger.log("Episode "+title+" isn't "+str(episode.season)+"x"+str(episode.episode)+", skipping it", logger.DEBUG)
+logger.log(
+    "Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it",
+    logger.DEBUG)
continue

quality = self.getQuality(item, episode.show.anime)

if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
-logger.log(u"Ignoring result "+title+" because we don't want an episode that is "+Quality.qualityStrings[quality], logger.DEBUG)
+logger.log(
+    u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
+    quality], logger.DEBUG)
continue

logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

@@ -181,7 +190,7 @@ class NyaaProvider(generic.TorrentProvider):

def _extract_name_from_filename(self, filename):
name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
-logger.log(u"Comparing "+name_regex+" against "+filename, logger.DEBUG)
+logger.log(u"Comparing " + name_regex + " against " + filename, logger.DEBUG)
match = re.match(name_regex, filename, re.I)
if match:
return match.group(1)

@@ -189,9 +198,7 @@ class NyaaProvider(generic.TorrentProvider):


class NyaaCache(tvcache.TVCache):

def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

# only poll NyaaTorrents every 15 minutes max

@@ -199,30 +206,30 @@ class NyaaCache(tvcache.TVCache):


def _getRSSData(self):

params = {
-"page" : 'rss', # Use RSS page
-"order" : '1' #Sort Descending By Date
+"page": 'rss', # Use RSS page
+"order": '1' #Sort Descending By Date
}

url = self.provider.url + '?' + urllib.urlencode(params)

-logger.log(u"NyaaTorrents cache update URL: "+ url, logger.DEBUG)
+logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)

data = self.provider.getURL(url)

return data

def _parseItem(self, item):

(title, url) = self.provider._get_title_and_url(item)

if not title or not url:
-logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable", logger.ERROR)
+logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable",
+    logger.ERROR)
return None

-logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG)
+logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)

return self._addCacheEntry(title, url)


provider = NyaaProvider()
@ -34,8 +34,8 @@ from sickbeard import exceptions, logger
from sickbeard import tvcache
from sickbeard.exceptions import ex


class NZBsProvider(generic.NZBProvider):
def __init__(self):

generic.NZBProvider.__init__(self, "NZBs.org Old")

@ -54,10 +54,10 @@ class NZBsProvider(generic.NZBProvider):
raise exceptions.AuthException("NZBs.org authentication details are empty, check your config")

def _get_season_search_strings(self, show, season, wantedEp, searchSeason=False):
return ['^'+x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)]
return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(show, season)]

def _get_episode_search_strings(self, ep_obj):
return ['^'+x for x in show_name_helpers.makeSceneSearchString(ep_obj)]
return ['^' + x for x in show_name_helpers.makeSceneSearchString(ep_obj)]

def _doSearch(self, curString, show=None):

@ -88,7 +88,7 @@ class NZBsProvider(generic.NZBProvider):
parsedXML = parseString(data)
items = parsedXML.getElementsByTagName('item')
except Exception, e:
logger.log(u"Error trying to load NZBs.org RSS feed: "+ex(e), logger.ERROR)
logger.log(u"Error trying to load NZBs.org RSS feed: " + ex(e), logger.ERROR)
return []

results = []

@ -97,11 +97,14 @@ class NZBsProvider(generic.NZBProvider):
(title, url) = self._get_title_and_url(curItem)

if not title or not url:
logger.log(u"The XML returned from the NZBs.org RSS feed is incomplete, this result is unusable: "+data, logger.ERROR)
logger.log(
u"The XML returned from the NZBs.org RSS feed is incomplete, this result is unusable: " + data,
logger.ERROR)
continue

if "&i=" not in url and "&h=" not in url:
raise exceptions.AuthException("The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
raise exceptions.AuthException(
"The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")

results.append(curItem)

@ -131,10 +134,9 @@ class NZBsProvider(generic.NZBProvider):

return results


class NZBsCache(tvcache.TVCache):

def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

# only poll NZBs.org every 15 minutes max

@ -151,7 +153,7 @@ class NZBsCache(tvcache.TVCache):

url += urllib.urlencode(urlArgs)

logger.log(u"NZBs cache update URL: "+ url, logger.DEBUG)
logger.log(u"NZBs cache update URL: " + url, logger.DEBUG)

data = self.provider.getURL(url)

@ -159,6 +161,8 @@ class NZBsCache(tvcache.TVCache):

def _checkItemAuth(self, title, url):
if "&i=" not in url and "&h=" not in url:
raise exceptions.AuthException("The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
raise exceptions.AuthException(
"The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")


provider = NZBsProvider()

@ -30,7 +30,6 @@ from sickbeard import tvcache, show_name_helpers


class NZBsRUSProvider(generic.NZBProvider):

def __init__(self):
generic.NZBProvider.__init__(self, "NZBs'R'US")
self.cache = NZBsRUSCache(self)

@ -98,7 +97,6 @@ class NZBsRUSProvider(generic.NZBProvider):


class NZBsRUSCache(tvcache.TVCache):

def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# only poll NZBs'R'US every 15 minutes max

@ -119,4 +117,5 @@ class NZBsRUSCache(tvcache.TVCache):

def _checkAuth(self, data):
return data != 'Invalid Link'


provider = NZBsRUSProvider()

@ -40,7 +40,6 @@ except ImportError:


class OmgwtfnzbsProvider(generic.NZBProvider):

def __init__(self):
generic.NZBProvider.__init__(self, "omgwtfnzbs")
self.cache = OmgwtfnzbsCache(self)

@ -73,8 +72,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
description_text = parsedJSON.get('notice')

if 'information is incorrect' in parsedJSON.get('notice'):
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text), logger.DEBUG)
logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(description_text),
logger.DEBUG)
raise AuthException("Your authentication credentials for " + self.name + " are incorrect, check your config.")
raise AuthException(
"Your authentication credentials for " + self.name + " are incorrect, check your config.")

elif '0 results matched your terms' in parsedJSON.get('notice'):
return True

@ -156,7 +157,6 @@ class OmgwtfnzbsProvider(generic.NZBProvider):


class OmgwtfnzbsCache(tvcache.TVCache):

def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
self.minTime = 20

@ -182,4 +182,5 @@ class OmgwtfnzbsCache(tvcache.TVCache):

def _checkAuth(self, parsedXML):
return self.provider._checkAuthFromData(parsedXML)


provider = OmgwtfnzbsProvider()

@ -43,8 +43,8 @@ from lib import requests
from bs4 import BeautifulSoup
from lib.unidecode import unidecode


class PublicHDProvider(generic.TorrentProvider):
def __init__(self):

generic.TorrentProvider.__init__(self, "PublicHD")

@ -81,10 +81,10 @@ class PublicHDProvider(generic.TorrentProvider):
if searchSeason:
search_string = {'Season': [], 'Episode': []}
for show_name in set(allPossibleShowNames(show)):
ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX -SXXE
ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX -SXXE
search_string['Season'].append(ep_string)

ep_string = show_name+' Season ' + str(season) #2) ShowName Season X
ep_string = show_name + ' Season ' + str(season) #2) ShowName Season X
search_string['Season'].append(ep_string)

for ep_obj in wantedEp:

@ -106,17 +106,18 @@ class PublicHDProvider(generic.TorrentProvider):

if ep_obj.show.air_by_date:
for show_name in set(allPossibleShowNames(ep_obj.show)):
ep_string = sanitizeSceneName(show_name) +' '+ \
ep_string = sanitizeSceneName(show_name) + ' ' + \
str(ep_obj.airdate) +'|'+\
str(ep_obj.airdate) + '|' + \
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
search_string['Episode'].append(ep_string)
else:
for show_name in set(allPossibleShowNames(ep_obj.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
'episodenumber': ep_obj.episode}

for x in add_string.split('|'):
to_search = re.sub('\s+', ' ', ep_string + ' %s' %x)
to_search = re.sub('\s+', ' ', ep_string + ' %s' % x)
search_string['Episode'].append(to_search)

return [search_string]

@ -130,10 +131,12 @@ class PublicHDProvider(generic.TorrentProvider):
for search_string in search_params[mode]:

if mode == 'RSS':
searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' %(';'.join(self.categories[mode]))
logger.log(u"PublicHD cache update URL: "+ searchURL, logger.DEBUG)
searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' % (
';'.join(self.categories[mode]))
logger.log(u"PublicHD cache update URL: " + searchURL, logger.DEBUG)
else:
searchURL = self.searchurl %(urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode]))
searchURL = self.searchurl % (
urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode]))
logger.log(u"Search string: " + searchURL, logger.DEBUG)

html = self.getURL(searchURL)

@ -143,19 +146,20 @@ class PublicHDProvider(generic.TorrentProvider):
try:
soup = BeautifulSoup(html, features=["html5lib", "permissive"])

torrent_table = soup.find('table', attrs = {'id' : 'torrbg'})
torrent_table = soup.find('table', attrs={'id': 'torrbg'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else []

#Continue only if one Release is found
if len(torrent_rows)<2:
if len(torrent_rows) < 2:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.DEBUG)
continue

for tr in torrent_rows[1:]:

try:
link = self.url + tr.find(href=re.compile('page=torrent-details'))['href']
title = tr.find(lambda x: x.has_attr('title')).text.replace('_','.')
title = tr.find(lambda x: x.has_attr('title')).text.replace('_', '.')
url = tr.find(href=re.compile('magnet+'))['href']
seeders = int(tr.find_all('td', {'class': 'header'})[4].text)
leechers = int(tr.find_all('td', {'class': 'header'})[5].text)

@ -173,7 +177,8 @@ class PublicHDProvider(generic.TorrentProvider):
items[mode].append(item)

except Exception, e:
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
logger.ERROR)

#For each search mode sort all the items by seeders
items[mode].sort(key=lambda tup: tup[3], reverse=True)

@ -187,7 +192,7 @@ class PublicHDProvider(generic.TorrentProvider):
title, url, id, seeders, leechers = item

if url:
url = url.replace('&amp;','&')
url = url.replace('&amp;', '&')

return (title, url)

@ -201,11 +206,12 @@ class PublicHDProvider(generic.TorrentProvider):

r = requests.get(url, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
return None

if r.status_code != 200:
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
r.status_code) + ': ' + clients.http_error_code[r.status_code], logger.WARNING)
return None

return r.content

@ -230,7 +236,8 @@ class PublicHDProvider(generic.TorrentProvider):
if not r.status_code == 200:
return False

magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
helpers.sanitizeFileName(result.name) + '.' + self.providerType)
magnetFileContent = r.content

try:

@ -250,7 +257,8 @@ class PublicHDProvider(generic.TorrentProvider):

results = []

sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +

@ -272,7 +280,6 @@ class PublicHDProvider(generic.TorrentProvider):


class PublicHDCache(tvcache.TVCache):

def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

@ -318,4 +325,5 @@ class PublicHDCache(tvcache.TVCache):

return self._addCacheEntry(title, url)


provider = PublicHDProvider()

@ -35,8 +35,8 @@ from lib import requests
from bs4 import BeautifulSoup
from lib.bencode import bdecode


class TorrentRssProvider(generic.TorrentProvider):
def __init__(self, name, url):

generic.TorrentProvider.__init__(self, name)

@ -50,7 +50,8 @@ class TorrentRssProvider(generic.TorrentProvider):
return self.name + '|' + self.url + '|' + str(int(self.enabled))

def imageName(self):
if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
if ek.ek(os.path.isfile,
ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
return self.getID() + '.png'
return 'torrentrss.png'

@ -71,7 +72,6 @@ class TorrentRssProvider(generic.TorrentProvider):

lambda: helpers.get_xml_text(item.find('link'))]

for cur_attempt in attempt_list:
try:
url = cur_attempt()

@ -132,14 +132,15 @@ class TorrentRssProvider(generic.TorrentProvider):
self.session = requests.Session()

try:
url = urljoin(url, urlparse(url).path.replace('//','/'))
url = urljoin(url, urlparse(url).path.replace('//', '/'))
response = self.session.get(url, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR)
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
return None

if response.status_code != 200:
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
return None

return response.content

@ -159,8 +160,8 @@ class TorrentRssProvider(generic.TorrentProvider):
logger.log(u"Saved custom_torrent html dump " + dumpName + " ", logger.MESSAGE)
return True


class TorrentRssCache(tvcache.TVCache):
def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

@ -37,14 +37,14 @@ from lib import requests
from bs4 import BeautifulSoup
from lib.unidecode import unidecode

class SCCProvider(generic.TorrentProvider):

urls = {'base_url' : 'https://sceneaccess.eu',
'login' : 'https://sceneaccess.eu/login',
'detail' : 'https://www.sceneaccess.eu/details?id=%s',
'search' : 'https://sceneaccess.eu/browse?search=%s&method=1&%s',
'archive' : 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26',
'download' : 'https://www.sceneaccess.eu/%s',
}


class SCCProvider(generic.TorrentProvider):
urls = {'base_url': 'https://sceneaccess.eu',
'login': 'https://sceneaccess.eu/login',
'detail': 'https://www.sceneaccess.eu/details?id=%s',
'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s',
'archive': 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26',
'download': 'https://www.sceneaccess.eu/%s',
}

def __init__(self):

@ -84,7 +84,7 @@ class SCCProvider(generic.TorrentProvider):
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' +ex(e), logger.ERROR)
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False

if re.search('Username or password incorrect', response.text) \

@ -106,7 +106,7 @@ class SCCProvider(generic.TorrentProvider):
if searchSeason:
search_string = {'Season': [], 'Episode': []}
for show_name in set(show_name_helpers.allPossibleShowNames(show)):
ep_string = show_name +' S%02d' % int(season) #1) ShowName SXX
ep_string = show_name + ' S%02d' % int(season) #1) ShowName SXX
search_string['Season'].append(ep_string)

for ep_obj in wantedEp:

@ -126,14 +126,15 @@ class SCCProvider(generic.TorrentProvider):

if ep_obj.show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
str(ep_obj.airdate) +'|'+\
str(ep_obj.airdate) + '|' + \
helpers.custom_strftime('%Y %b {S}', ep_obj.airdate)
search_string['Episode'].append(ep_string)
else:
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) +' '+ \
ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season, 'episodenumber': ep_obj.episode}
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.season,
'episodenumber': ep_obj.episode}

search_string['Episode'].append(re.sub('\s+', ' ', ep_string))

@ -167,24 +168,25 @@ class SCCProvider(generic.TorrentProvider):
try:
html = BeautifulSoup(data, features=["html5lib", "permissive"])

torrent_table = html.find('table', attrs = {'id' : 'torrents-table'})
torrent_table = html.find('table', attrs={'id': 'torrents-table'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else []

#Continue only if one Release is found
if len(torrent_rows)<2:
if len(torrent_rows) < 2:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent", logger.DEBUG)
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.DEBUG)
continue

for result in torrent_table.find_all('tr')[1:]:

try:
link = result.find('td', attrs = {'class' : 'ttr_name'}).find('a')
link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
url = result.find('td', attrs = {'class' : 'td_dl'}).find('a')
url = result.find('td', attrs={'class': 'td_dl'}).find('a')
title = link.string
download_url = self.urls['download'] % url['href']
id = int(link['href'].replace('details?id=', ''))
seeders = int(result.find('td', attrs = {'class' : 'ttr_seeders'}).string)
seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
leechers = int(result.find('td', attrs = {'class' : 'ttr_leechers'}).string)
leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
except (AttributeError, TypeError):
continue

@ -214,7 +216,7 @@ class SCCProvider(generic.TorrentProvider):
title, url, id, seeders, leechers = item

if url:
url = str(url).replace('&amp;','&')
url = str(url).replace('&amp;', '&')

return (title, url)

@ -234,11 +236,12 @@ class SCCProvider(generic.TorrentProvider):

response = self.session.get(url, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u"Error loading "+self.name+" URL: " + ex(e), logger.ERROR)
logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
return None

if response.status_code != 200:
logger.log(self.name + u" page requested with url " + url +" returned status code is " + str(response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
logger.log(self.name + u" page requested with url " + url + " returned status code is " + str(
response.status_code) + ': ' + clients.http_error_code[response.status_code], logger.WARNING)
return None

return response.content

@ -247,7 +250,8 @@ class SCCProvider(generic.TorrentProvider):

results = []

sqlResults = db.DBConnection().select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
sqlResults = db.DBConnection().select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +

@ -269,7 +273,6 @@ class SCCProvider(generic.TorrentProvider):


class SCCCache(tvcache.TVCache):

def __init__(self, provider):

tvcache.TVCache.__init__(self, provider)

@ -315,4 +318,5 @@ class SCCCache(tvcache.TVCache):

return self._addCacheEntry(title, url)


provider = SCCProvider()
Some files were not shown because too many files have changed in this diff.